diff --git a/.gitattributes b/.gitattributes
index a6344aac8c09253b3b630fb776ae94478aa0275b..318f90b5e6dcdb746763edb2a2c2df1e0f101aa2 100644
--- a/.gitattributes
+++ b/.gitattributes
@@ -33,3 +33,5 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
*.zip filter=lfs diff=lfs merge=lfs -text
*.zst filter=lfs diff=lfs merge=lfs -text
*tfevents* filter=lfs diff=lfs merge=lfs -text
+stt_ar_fastconformer_hybrid_large_pcd_v1.0.nemo filter=lfs diff=lfs merge=lfs -text
+whisper_checkpoints/models--openai--whisper-large-v2/blobs/57a1ba2a82c093cabff2541409ae778c97145378b9ddfa722763cb1cb8f9020b filter=lfs diff=lfs merge=lfs -text
diff --git a/aqib-nemo-asr.py b/aqib-nemo-asr.py
new file mode 100644
index 0000000000000000000000000000000000000000..d649129dd339810159a0dd616b902d8cd6c6d305
--- /dev/null
+++ b/aqib-nemo-asr.py
@@ -0,0 +1,601 @@
+import asyncio
+import websockets
+import json
+import threading
+import numpy as np
+import logging
+import time
+import tempfile
+import os
+import re
+from concurrent.futures import ThreadPoolExecutor
+import nemo.collections.asr as nemo_asr
+import soundfile as sf
+
+# Set up logging
+logging.basicConfig(level=logging.INFO)
+logger = logging.getLogger(__name__)
+
+
+# ===== Arabic number mapping (expanded) =====
+arabic_numbers = {
+ # Basic digits
+ "صفر": "0", "زيرو": "0", "٠": "0","زيو": "0","زير": "0",
+ "واحد": "1", "واحدة": "1", "١": "1",
+ "اتنين": "2", "اثنين": "2", "إثنين": "2", "اثنان": "2", "إثنان": "2", "٢": "2",
+ "تلاتة": "3", "ثلاثة": "3", "٣": "3",
+ "اربعة": "4", "أربعة": "4", "٤": "4",
+ "خمسة": "5", "٥": "5",
+ "ستة": "6", "٦": "6",
+ "سبعة": "7", "٧": "7",
+ "تمانية": "8", "ثمانية": "8", "٨": "8",
+ "تسعة": "9", "٩": "9",
+
+ # Teens
+ "عشرة": "10", "١٠": "10",
+ "حداشر": "11", "احد عشر": "11","احداشر": "11",
+ "اتناشر": "12", "اثنا عشر": "12",
+ "تلتاشر": "13", "ثلاثة عشر": "13",
+ "اربعتاشر": "14", "أربعة عشر": "14",
+ "خمستاشر": "15", "خمسة عشر": "15",
+ "ستاشر": "16", "ستة عشر": "16",
+ "سبعتاشر": "17", "سبعة عشر": "17",
+ "طمنتاشر": "18", "ثمانية عشر": "18",
+ "تسعتاشر": "19", "تسعة عشر": "19",
+
+ # Tens
+ "عشرين": "20", "٢٠": "20",
+ "تلاتين": "30", "ثلاثين": "30", "٣٠": "30",
+ "اربعين": "40", "أربعين": "40", "٤٠": "40",
+ "خمسين": "50", "٥٠": "50",
+ "ستين": "60", "٦٠": "60",
+ "سبعين": "70", "٧٠": "70",
+ "تمانين": "80", "ثمانين": "80", "٨٠": "80","تمانون": "80","ثمانون": "80",
+ "تسعين": "90", "٩٠": "90",
+
+ # Hundreds
+ "مية": "100", "مائة": "100", "مئة": "100", "١٠٠": "100",
+ "ميتين": "200", "مائتين": "200",
+ "تلاتمية": "300", "ثلاثمائة": "300",
+ "اربعمية": "400", "أربعمائة": "400",
+ "خمسمية": "500", "خمسمائة": "500",
+ "ستمية": "600", "ستمائة": "600",
+ "سبعمية": "700", "سبعمائة": "700",
+ "تمانمية": "800", "ثمانمائة": "800",
+ "تسعمية": "900", "تسعمائة": "900",
+
+ # Thousands
+ "ألف": "1000", "الف": "1000", "١٠٠٠": "1000",
+ "ألفين": "2000", "الفين": "2000",
+ "تلات تلاف": "3000", "ثلاثة آلاف": "3000",
+ "اربعة آلاف": "4000", "أربعة آلاف": "4000",
+ "خمسة آلاف": "5000",
+ "ستة آلاف": "6000",
+ "سبعة آلاف": "7000",
+ "تمانية آلاف": "8000", "ثمانية آلاف": "8000",
+ "تسعة آلاف": "9000",
+
+ # Large numbers
+ "عشرة آلاف": "10000",
+ "مية ألف": "100000", "مائة ألف": "100000",
+ "مليون": "1000000", "١٠٠٠٠٠٠": "1000000",
+ "ملايين": "1000000",
+ "مليار": "1000000000", "١٠٠٠٠٠٠٠٠٠": "1000000000"
+}
+
+def replace_arabic_numbers(text: str) -> str:
+    for word in sorted(arabic_numbers, key=len, reverse=True):  # longest keys first so "خمسة عشر" -> "15", not "5 10"
+        text = re.sub(rf"\b{word}\b", arabic_numbers[word], text)
+    return text
+
+
+# Global NeMo model
+asr_model = None
+
+def initialize_nemo_model():
+ """Initialize NeMo FastConformer model"""
+ global asr_model
+
+ logger.info("Loading NeMo FastConformer Arabic ASR model...")
+
+ # Model path - adjust this to your model location
+ model_path = os.getenv(
+ "NEMO_MODEL_PATH",
+ "/path/to/stt_ar_fastconformer_hybrid_large_pcd_v1.0.nemo" # Update this path
+ )
+
+ if not os.path.exists(model_path):
+ logger.error(f"Model not found at: {model_path}")
+ logger.info("Please download the model from: https://catalog.ngc.nvidia.com/orgs/nvidia/teams/nemo/models/stt_ar_fastconformer_hybrid_large_pcd")
+ raise FileNotFoundError(f"NeMo model not found: {model_path}")
+
+ try:
+ asr_model = nemo_asr.models.EncDecCTCModel.restore_from(model_path)
+ logger.info("NeMo FastConformer model loaded successfully")
+
+ # Set model to eval mode for inference
+ asr_model.eval()
+
+ except Exception as e:
+ logger.error(f"Failed to load NeMo model: {e}")
+ raise
+
+# Initialize model on startup
+initialize_nemo_model()
+
+# Thread pool for processing
+executor = ThreadPoolExecutor(max_workers=4)
+
+class JambonzAudioBuffer:
+ def __init__(self, sample_rate=8000, chunk_duration=1.0):
+ self.sample_rate = sample_rate
+ self.chunk_duration = chunk_duration
+ self.chunk_samples = int(chunk_duration * sample_rate)
+
+ self.buffer = np.array([], dtype=np.float32)
+ self.lock = threading.Lock()
+ self.total_audio = np.array([], dtype=np.float32)
+
+ # Voice Activity Detection
+ self.silence_threshold = 0.05
+ self.min_speech_samples = int(0.5 * sample_rate)
+
+ def add_audio(self, audio_data):
+ with self.lock:
+ self.buffer = np.concatenate([self.buffer, audio_data])
+ self.total_audio = np.concatenate([self.total_audio, audio_data])
+
+ def has_chunk_ready(self):
+ with self.lock:
+ return len(self.buffer) >= self.chunk_samples
+
+ def is_speech(self, audio_chunk):
+ """Simple VAD based on energy"""
+ if len(audio_chunk) < self.min_speech_samples:
+ return False
+ energy = np.mean(np.abs(audio_chunk))
+ return energy > self.silence_threshold
+
+ def get_chunk_for_processing(self):
+ """Get audio chunk for processing"""
+ with self.lock:
+ if len(self.buffer) < self.chunk_samples:
+ return None
+ return np.array([1]) # Signal that chunk is ready
+
+ def get_all_audio(self):
+ """Get all accumulated audio"""
+ with self.lock:
+ return self.total_audio.copy()
+
+ def clear(self):
+ with self.lock:
+ self.buffer = np.array([], dtype=np.float32)
+ self.total_audio = np.array([], dtype=np.float32)
+
+ def reset_for_new_segment(self):
+ """Reset buffers for new transcription segment"""
+ with self.lock:
+ self.buffer = np.array([], dtype=np.float32)
+ self.total_audio = np.array([], dtype=np.float32)
+
+def linear16_to_audio(audio_bytes, sample_rate=8000):
+ """Convert LINEAR16 PCM bytes to numpy array"""
+ try:
+ audio_array = np.frombuffer(audio_bytes, dtype=np.int16)
+ audio_array = audio_array.astype(np.float32) / 32768.0
+ return audio_array
+ except Exception as e:
+ logger.error(f"Error converting LINEAR16 to audio: {e}")
+ return np.array([], dtype=np.float32)
+
+def resample_audio(audio_data, source_rate, target_rate):
+ """Resample audio to target sample rate"""
+ if source_rate == target_rate:
+ return audio_data
+
+ if source_rate == 8000 and target_rate == 16000:
+ # Simple 2x upsampling for common case
+ upsampled = np.repeat(audio_data, 2)
+ return upsampled.astype(np.float32)
+
+ # Fallback: Linear interpolation resampling
+ ratio = target_rate / source_rate
+ indices = np.arange(0, len(audio_data), 1/ratio)
+ indices = indices[indices < len(audio_data)]
+ resampled = np.interp(indices, np.arange(len(audio_data)), audio_data)
+
+ return resampled.astype(np.float32)
+
+def transcribe_with_nemo(audio_data, source_sample_rate=8000, target_sample_rate=16000):
+    """Transcribe audio using NeMo FastConformer"""
+    try:
+        if len(audio_data) == 0:
+            return ""
+
+        # Resample to the model's expected rate (NeMo models typically expect 16kHz);
+        # honor the caller-supplied rates instead of hard-coding 8k->16k
+        resampled_audio = resample_audio(audio_data, source_sample_rate, target_sample_rate)
+
+        # Skip very short audio (< 300ms at the target rate)
+        min_samples = int(0.3 * target_sample_rate)
+        if len(resampled_audio) < min_samples:
+            return ""
+
+        start_time = time.time()
+
+        # Save audio to temporary file (NeMo expects file path)
+        with tempfile.NamedTemporaryFile(delete=False, suffix=".wav") as tmp_file:
+            # Write audio as WAV file
+            sf.write(tmp_file.name, resampled_audio, target_sample_rate)
+            tmp_path = tmp_file.name
+
+        try:
+            # Transcribe with NeMo
+            result = asr_model.transcribe([tmp_path])
+
+            # Debug logging to understand result format
+            logger.info(f"NeMo result type: {type(result)}")
+            if result and len(result) > 0:
+                logger.info(f"First result type: {type(result[0])}")
+                logger.info(f"First result content: {result[0]}")
+
+            if result and len(result) > 0:
+                # Handle different NeMo result formats
+                if hasattr(result[0], 'text'):
+                    # If result has .text attribute (newer NeMo versions)
+                    raw_text = result[0].text
+                    logger.info(f"Using .text attribute: {raw_text}")
+                elif isinstance(result[0], str):
+                    # If result is directly a string
+                    raw_text = result[0]
+                    logger.info(f"Using direct string: {raw_text}")
+                else:
+                    # If result is some other format, convert to string
+                    raw_text = str(result[0])
+                    logger.info(f"Using str() conversion: {raw_text}")
+
+                # Ensure raw_text is a string before processing
+                if not isinstance(raw_text, str):
+                    raw_text = str(raw_text)
+
+                # Only process if we have actual text content
+                if raw_text and raw_text.strip():
+                    # Convert Arabic numbers to digits
+
+                    logger.info(f"before sending to FXN--- {raw_text}")
+                    cleaned_text = replace_arabic_numbers(raw_text)
+                    logger.info(f"after FXN--- {cleaned_text}")
+                    end_time = time.time()
+
+                    if cleaned_text.strip():
+                        logger.info(f"NeMo transcription: '{cleaned_text}' (processed in {end_time - start_time:.2f}s)")
+
+                    return cleaned_text.strip()
+                else:
+                    logger.info("No transcription text found")
+                    return ""
+            else:
+                logger.info("No results from NeMo transcription")
+                return ""
+
+        finally:
+            # Clean up temporary file
+            if os.path.exists(tmp_path):
+                os.remove(tmp_path)
+
+    except Exception as e:
+        logger.error(f"Error during NeMo transcription: {e}")
+        return ""
+
+class JambonzSTTHandler:
+ def __init__(self, websocket):
+ self.websocket = websocket
+ self.audio_buffer = None
+ self.config = {}
+ self.running = False
+ self.transcription_task = None
+
+ # Auto-final detection variables
+ self.interim_count = 0
+ self.last_interim_time = None
+ self.silence_timeout = 2.0
+ self.min_interim_count = 2
+ self.auto_final_task = None
+ self.accumulated_transcript = ""
+ self.final_sent = False
+ self.segment_number = 0
+ self.last_partial = ""
+
+ # Processing tracking
+ self.processing_count = 0
+
+ async def start_processing(self, start_message):
+ """Initialize with start message from jambonz"""
+ self.config = {
+ "language": start_message.get("language", "ar-EG"),
+ "format": start_message.get("format", "raw"),
+ "encoding": start_message.get("encoding", "LINEAR16"),
+ "sample_rate": start_message.get("sampleRateHz", 8000),
+ "interim_results": True, # Always enable for internal processing
+ "options": start_message.get("options", {})
+ }
+
+ logger.info(f"NeMo STT session started with config: {self.config}")
+
+ # Initialize audio buffer
+ self.audio_buffer = JambonzAudioBuffer(
+ sample_rate=self.config["sample_rate"],
+ chunk_duration=1.0 # 1 second chunks for NeMo
+ )
+
+ # Reset session variables
+ self.running = True
+ self.interim_count = 0
+ self.last_interim_time = None
+ self.accumulated_transcript = ""
+ self.final_sent = False
+ self.segment_number = 0
+ self.processing_count = 0
+ self.last_partial = ""
+
+ # Start background transcription task
+ self.transcription_task = asyncio.create_task(self._process_audio_chunks())
+
+ # Start auto-final detection task
+ self.auto_final_task = asyncio.create_task(self._monitor_for_auto_final())
+
+ async def stop_processing(self):
+ """Stop current processing session"""
+ logger.info("Stopping NeMo STT session...")
+ self.running = False
+
+ # Cancel background tasks
+ for task in [self.transcription_task, self.auto_final_task]:
+ if task:
+ task.cancel()
+ try:
+ await task
+ except asyncio.CancelledError:
+ pass
+
+ # Send final transcription if not already sent
+ if not self.final_sent and self.accumulated_transcript.strip():
+ await self.send_transcription(self.accumulated_transcript, is_final=True)
+
+ # Process any remaining audio for comprehensive final transcription
+ if self.audio_buffer:
+ all_audio = self.audio_buffer.get_all_audio()
+ if len(all_audio) > 0 and not self.final_sent:
+ loop = asyncio.get_event_loop()
+ final_transcription = await loop.run_in_executor(
+ executor,
+ transcribe_with_nemo,
+ all_audio,
+ self.config["sample_rate"]
+ )
+
+ if final_transcription.strip():
+ await self.send_transcription(final_transcription, is_final=True)
+
+ # Clear audio buffer
+ if self.audio_buffer:
+ self.audio_buffer.clear()
+
+ logger.info("NeMo STT session stopped")
+
+ async def start_new_segment(self):
+ """Start a new transcription segment"""
+ self.segment_number += 1
+ self.interim_count = 0
+ self.last_interim_time = None
+ self.accumulated_transcript = ""
+ self.final_sent = False
+ self.last_partial = ""
+ self.processing_count = 0
+
+ if self.audio_buffer:
+ self.audio_buffer.reset_for_new_segment()
+
+ logger.info(f"Started new transcription segment #{self.segment_number}")
+
+ async def add_audio_data(self, audio_bytes):
+ """Add audio data to buffer"""
+ if self.audio_buffer and self.running:
+ audio_data = linear16_to_audio(audio_bytes, self.config["sample_rate"])
+ self.audio_buffer.add_audio(audio_data)
+
+ async def _process_audio_chunks(self):
+ """Process audio chunks for interim results"""
+ while self.running:
+ try:
+ if self.audio_buffer and self.audio_buffer.has_chunk_ready():
+ chunk_signal = self.audio_buffer.get_chunk_for_processing()
+ if chunk_signal is not None:
+ all_audio = self.audio_buffer.get_all_audio()
+
+ if len(all_audio) > 0 and self.audio_buffer.is_speech(all_audio[-self.audio_buffer.chunk_samples:]):
+ loop = asyncio.get_event_loop()
+ transcription = await loop.run_in_executor(
+ executor,
+ transcribe_with_nemo,
+ all_audio,
+ self.config["sample_rate"]
+ )
+
+ if transcription.strip():
+ self.processing_count += 1
+ self.accumulated_transcript = transcription
+
+ if transcription != self.last_partial or self.interim_count == 0:
+ self.last_partial = transcription
+ self.interim_count += 1
+ self.last_interim_time = time.time()
+ logger.info(f"Updated interim_count to {self.interim_count} for transcript: '{transcription}'")
+ else:
+ self.last_interim_time = time.time()
+
+ await asyncio.sleep(0.1) # Check every 100ms
+
+ except Exception as e:
+ logger.error(f"Error in chunk processing: {e}")
+ await asyncio.sleep(0.1)
+
+ async def _monitor_for_auto_final(self):
+ """Monitor for auto-final conditions"""
+ while self.running:
+ try:
+ current_time = time.time()
+
+ if (self.interim_count >= self.min_interim_count and
+ self.last_interim_time is not None and
+ (current_time - self.last_interim_time) >= self.silence_timeout and
+ not self.final_sent and
+ self.accumulated_transcript.strip()):
+
+ logger.info(f"Auto-final triggered for segment #{self.segment_number}")
+
+ await self.send_transcription(self.accumulated_transcript, is_final=True)
+ await self.start_new_segment()
+
+ await asyncio.sleep(0.5) # Check every 500ms
+
+ except Exception as e:
+ logger.error(f"Error in auto-final monitoring: {e}")
+ await asyncio.sleep(0.5)
+
+ async def send_transcription(self, text, is_final=True, confidence=0.9):
+ """Send transcription in jambonz format"""
+ try:
+ message = {
+ "type": "transcription",
+ "is_final": True, # Always send as final
+ "alternatives": [
+ {
+ "transcript": text,
+ "confidence": confidence
+ }
+ ],
+ "language": self.config.get("language", "ar-EG"),
+ "channel": 1
+ }
+
+ await self.websocket.send(json.dumps(message))
+ self.final_sent = True
+
+ logger.info(f"Sent FINAL transcription to Jambonz: '{text}'")
+
+ except Exception as e:
+ logger.error(f"Error sending transcription: {e}")
+
+ async def send_error(self, error_message):
+ """Send error message in jambonz format"""
+ try:
+ message = {
+ "type": "error",
+ "error": error_message
+ }
+ await self.websocket.send(json.dumps(message))
+ logger.error(f"Sent error: {error_message}")
+ except Exception as e:
+ logger.error(f"Error sending error message: {e}")
+
+async def handle_jambonz_websocket(websocket):
+ """Handle jambonz WebSocket connections"""
+
+ client_id = f"jambonz_{id(websocket)}"
+ logger.info(f"New NeMo jambonz connection: {client_id}")
+
+ handler = JambonzSTTHandler(websocket)
+
+ try:
+ async for message in websocket:
+ try:
+ if isinstance(message, str):
+ data = json.loads(message)
+ message_type = data.get("type")
+
+ if message_type == "start":
+ logger.info(f"Received start message: {data}")
+ await handler.start_processing(data)
+
+ elif message_type == "stop":
+ logger.info("Received stop message - closing WebSocket")
+ await handler.stop_processing()
+ await websocket.close(code=1000, reason="Session stopped by client")
+ break
+
+ else:
+ logger.warning(f"Unknown message type: {message_type}")
+ await handler.send_error(f"Unknown message type: {message_type}")
+
+ else:
+ # Handle binary audio data
+ if not handler.running or handler.audio_buffer is None:
+ logger.warning("Received audio data outside of active session")
+ await handler.send_error("Received audio before start message or after stop")
+ continue
+
+ await handler.add_audio_data(message)
+
+ except json.JSONDecodeError as e:
+ logger.error(f"JSON decode error: {e}")
+ await handler.send_error(f"Invalid JSON: {str(e)}")
+ except Exception as e:
+ logger.error(f"Error processing message: {e}")
+ await handler.send_error(f"Processing error: {str(e)}")
+
+ except websockets.exceptions.ConnectionClosed:
+ logger.info(f"NeMo jambonz connection closed: {client_id}")
+ except Exception as e:
+ logger.error(f"NeMo jambonz WebSocket error: {e}")
+ try:
+ await handler.send_error(str(e))
+ except:
+ pass
+ finally:
+ if handler.running:
+ await handler.stop_processing()
+ logger.info(f"NeMo jambonz connection ended: {client_id}")
+
+async def main():
+ """Start the NeMo jambonz STT WebSocket server"""
+ logger.info("Starting NeMo Jambonz STT WebSocket server on port 3007...")
+
+ # Start WebSocket server
+ server = await websockets.serve(
+ handle_jambonz_websocket,
+ "0.0.0.0",
+ 3007,
+ ping_interval=20,
+ ping_timeout=10,
+ close_timeout=10
+ )
+
+ logger.info("NeMo Jambonz STT WebSocket server started on ws://0.0.0.0:3007")
+ logger.info("Ready to handle jambonz STT requests with NeMo FastConformer")
+ logger.info("FEATURES:")
+ logger.info("- Arabic ASR using NeMo FastConformer model")
+ logger.info("- Arabic number word to digit conversion")
+ logger.info("- Continuous transcription with segmentation")
+ logger.info("- Voice Activity Detection")
+
+ # Wait for the server to close
+ await server.wait_closed()
+
+if __name__ == "__main__":
+ print("=" * 80)
+ print("NeMo FastConformer Jambonz STT Server")
+ print("=" * 80)
+ print("Model: NeMo FastConformer Arabic ASR")
+ print("WebSocket Port: 3007")
+ print("Protocol: jambonz STT API")
+ print("Audio Format: LINEAR16 PCM @ 8kHz → 16kHz")
+ print("Language: Arabic with number conversion")
+ print("=" * 80)
+
+ try:
+ asyncio.run(main())
+ except KeyboardInterrupt:
+ print("\nShutting down NeMo server...")
+ except Exception as e:
+ print(f"Server error: {e}")
diff --git a/aqib-whipser4-arabic.py b/aqib-whipser4-arabic.py
new file mode 100644
index 0000000000000000000000000000000000000000..afb3839992478d56f98f73cac92d7f7b3a491a29
--- /dev/null
+++ b/aqib-whipser4-arabic.py
@@ -0,0 +1,654 @@
+import torch
+import asyncio
+import websockets
+import json
+import threading
+import numpy as np
+from transformers import AutoModelForSpeechSeq2Seq, AutoProcessor, WhisperTokenizer, pipeline
+import subprocess
+import logging
+import time
+from concurrent.futures import ThreadPoolExecutor
+import struct
+import re
+
+# Arabic number conversion imports
+try:
+ from pyarabic.number import text2number
+ arabic_numbers_available = True
+ print("Arabic number conversion available")
+except ImportError:
+ arabic_numbers_available = False
+ print("pyarabic not available - install with: pip install pyarabic")
+ print("Arabic numbers will not be converted to digits")
+
+# Set up logging
+logging.basicConfig(level=logging.INFO)
+logger = logging.getLogger(__name__)
+
+
+def convert_arabic_numbers_in_sentence(sentence: str) -> str:
+ """
+ Replace Arabic number words in a sentence with digits,
+ preserving all other words and punctuation.
+ Handles common spelling variants and zero explicitly.
+ """
+ try:
+ print("Fxn called--------------")
+
+ # --- Normalization step ---
+ replacements = {
+ "اربعة": "أربعة",
+ "اربع": "أربع",
+ "اثنين": "اثنان",
+ "اتنين": "اثنان", # Egyptian variant
+ "ثلاث": "ثلاثة",
+ "خمس": "خمسة",
+ "ست": "ستة",
+ "سبع": "سبعة",
+ "ثمان": "ثمانية",
+ "تسع": "تسعة",
+ "عشر": "عشرة",
+ }
+ for wrong, correct in replacements.items():
+ sentence = re.sub(rf"\b{wrong}\b", correct, sentence)
+
+ # --- Split by whitespace but keep spaces ---
+ words = re.split(r'(\s+)', sentence)
+ converted_words = []
+
+ for word in words:
+ stripped = word.strip()
+ if not stripped: # skip spaces
+ converted_words.append(word)
+ continue
+
+ try:
+ num = text2number(stripped)
+
+ # Accept valid numbers, including zero explicitly
+ if isinstance(num, int):
+ if num != 0 or stripped == "صفر":
+ converted_words.append(str(num))
+ else:
+ converted_words.append(word)
+ else:
+ converted_words.append(word)
+
+ except Exception:
+ converted_words.append(word)
+
+ return ''.join(converted_words)
+
+ except Exception as e:
+ logger.warning(f"Error converting Arabic numbers: {e}")
+ return sentence
+
+
+# Try to install flash-attn if not available
+try:
+ import flash_attn
+ use_flash_attn = True
+except ImportError:
+ print("Flash attention not available, using standard attention")
+ use_flash_attn = False
+ try:
+ subprocess.run(
+ "pip install websockets",
+ shell=True,
+ check=False
+ )
+ subprocess.run(
+ "pip install flash-attn --no-build-isolation",
+ shell=True,
+ check=False
+ )
+ except:
+ pass
+
+device = "cuda" if torch.cuda.is_available() else "cpu"
+torch_dtype = torch.float16 if torch.cuda.is_available() else torch.float32
+MODEL_NAME = "openai/whisper-large-v3-turbo"
+
+print(f"Using device: {device}")
+print(f"CUDA available: {torch.cuda.is_available()}")
+if torch.cuda.is_available():
+ print(f"GPU: {torch.cuda.get_device_name(0)}")
+
+# Model initialization with fallback for attention implementation
+try:
+ if use_flash_attn and torch.cuda.is_available():
+ model = AutoModelForSpeechSeq2Seq.from_pretrained(
+ MODEL_NAME,
+ torch_dtype=torch_dtype,
+ low_cpu_mem_usage=True,
+ use_safetensors=True,
+ attn_implementation="flash_attention_2"
+ )
+ else:
+ model = AutoModelForSpeechSeq2Seq.from_pretrained(
+ MODEL_NAME,
+ torch_dtype=torch_dtype,
+ low_cpu_mem_usage=True,
+ use_safetensors=True
+ )
+except Exception as e:
+ print(f"Error loading model with flash attention: {e}")
+ model = AutoModelForSpeechSeq2Seq.from_pretrained(
+ MODEL_NAME,
+ torch_dtype=torch_dtype,
+ low_cpu_mem_usage=True,
+ use_safetensors=True
+ )
+
+model.to(device)
+
+processor = AutoProcessor.from_pretrained(MODEL_NAME)
+tokenizer = WhisperTokenizer.from_pretrained(MODEL_NAME)
+
+# Thread pool for processing audio
+executor = ThreadPoolExecutor(max_workers=4)
+
+class JambonzAudioBuffer:
+ def __init__(self, sample_rate=8000, chunk_duration=1.0):
+ self.sample_rate = sample_rate
+ self.chunk_duration = chunk_duration
+ self.chunk_samples = int(chunk_duration * sample_rate)
+
+ self.buffer = np.array([], dtype=np.float32)
+ self.lock = threading.Lock()
+ self.total_audio = np.array([], dtype=np.float32)
+
+ # Voice Activity Detection (simple energy-based)
+ self.silence_threshold = 0.01
+ self.min_speech_samples = int(0.3 * sample_rate) # 300ms minimum speech
+
+ def add_audio(self, audio_data):
+ with self.lock:
+ self.buffer = np.concatenate([self.buffer, audio_data])
+ self.total_audio = np.concatenate([self.total_audio, audio_data])
+
+ def has_chunk_ready(self):
+ with self.lock:
+ return len(self.buffer) >= self.chunk_samples
+
+ def is_speech(self, audio_chunk):
+ """Simple VAD based on energy"""
+ if len(audio_chunk) < self.min_speech_samples:
+ return False
+ energy = np.mean(np.abs(audio_chunk))
+ return energy > self.silence_threshold
+
+ def get_chunk_for_processing(self):
+ """Get audio chunk for processing - but don't remove it from buffer for interim results"""
+ with self.lock:
+ if len(self.buffer) < self.chunk_samples:
+ return None
+
+ # For interim results, we want to trigger processing but keep accumulating audio
+ # So we just return a signal that we have enough audio, but don't consume it
+ return np.array([1]) # Return a dummy array to signal chunk is ready
+
+ def get_all_audio(self):
+ """Get all accumulated audio for final transcription"""
+ with self.lock:
+ return self.total_audio.copy()
+
+ def clear(self):
+ with self.lock:
+ self.buffer = np.array([], dtype=np.float32)
+ self.total_audio = np.array([], dtype=np.float32)
+
+def linear16_to_audio(audio_bytes, sample_rate=8000):
+ """Convert LINEAR16 PCM bytes to numpy array (jambonz format)"""
+ try:
+ # jambonz sends LINEAR16 PCM at 8kHz
+ audio_array = np.frombuffer(audio_bytes, dtype=np.int16)
+ # Convert to float32 and normalize
+ audio_array = audio_array.astype(np.float32) / 32768.0
+ return audio_array
+ except Exception as e:
+ logger.error(f"Error converting LINEAR16 to audio: {e}")
+ return np.array([], dtype=np.float32)
+
+def resample_audio(audio_data, source_rate, target_rate):
+ """Simple resampling from 8kHz to 16kHz for Whisper"""
+ if source_rate == target_rate:
+ return audio_data
+
+ # Simple linear interpolation resampling
+ ratio = target_rate / source_rate
+ indices = np.arange(0, len(audio_data), 1/ratio)
+ indices = indices[indices < len(audio_data)]
+ resampled = np.interp(indices, np.arange(len(audio_data)), audio_data)
+
+ # Ensure proper float32 dtype for consistency
+ return resampled.astype(np.float32)
+
+def transcribe_chunk_direct(audio_data, source_sample_rate=8000, target_sample_rate=16000):
+ """Transcribe audio chunk using model's generate method directly"""
+ try:
+ if len(audio_data) == 0:
+ return ""
+
+ # Resample from 8kHz to 16kHz for Whisper
+ resampled_audio = resample_audio(audio_data, source_sample_rate, target_sample_rate)
+
+ # Ensure minimum length for Whisper
+ min_samples = int(0.1 * target_sample_rate) # 100ms minimum
+ if len(resampled_audio) < min_samples:
+ return ""
+
+ start_time = time.time()
+
+ # Prepare input features with proper dtype
+ input_features = processor(
+ resampled_audio,
+ sampling_rate=target_sample_rate,
+ return_tensors="pt"
+ ).input_features
+
+ # Ensure correct dtype and device
+ input_features = input_features.to(device=device, dtype=torch_dtype)
+
+ # Create attention mask to avoid warnings
+ attention_mask = torch.ones(
+ input_features.shape[:-1],
+ dtype=torch.long,
+ device=device
+ )
+
+ # Generate transcription using model directly
+ with torch.no_grad():
+ predicted_ids = model.generate(
+ input_features,
+ attention_mask=attention_mask,
+ max_new_tokens=128,
+ do_sample=False,
+ temperature=0.0,
+ num_beams=1,
+ language="ar",
+ task="transcribe",
+ pad_token_id=tokenizer.pad_token_id,
+ eos_token_id=tokenizer.eos_token_id
+ )
+
+ # Decode the transcription
+ transcription = tokenizer.batch_decode(
+ predicted_ids,
+ skip_special_tokens=True
+ )[0].strip()
+
+ end_time = time.time()
+
+ logger.info(f"Direct transcription completed in {end_time - start_time:.2f}s: '{transcription}'")
+ return transcription
+
+ except Exception as e:
+ logger.error(f"Error during direct transcription: {e}")
+ return ""
+
+class JambonzSTTHandler:
+ def __init__(self, websocket):
+ self.websocket = websocket
+ self.audio_buffer = None
+ self.config = {}
+ self.running = True
+ self.transcription_task = None
+ self.full_transcript = ""
+ self.last_partial = ""
+
+ # Auto-final detection variables
+ self.interim_count = 0
+ self.last_interim_time = None
+        self.silence_timeout = 1.5 # 1.5 seconds of silence to trigger final
+ self.min_interim_count = 1 # Minimum interim results before considering final
+ self.auto_final_task = None
+ self.accumulated_transcript = ""
+ self.final_sent = False
+
+ async def start_processing(self, start_message):
+ """Initialize with start message from jambonz"""
+ self.config = {
+ "language": start_message.get("language", "ar-EG"),
+ "format": start_message.get("format", "raw"),
+ "encoding": start_message.get("encoding", "LINEAR16"),
+ "sample_rate": start_message.get("sampleRateHz", 8000),
+ "interim_results": start_message.get("interimResults", True),
+ "options": start_message.get("options", {})
+ }
+
+ logger.info(f"STT session started with config: {self.config}")
+
+ # Initialize audio buffer
+ self.audio_buffer = JambonzAudioBuffer(
+ sample_rate=self.config["sample_rate"],
+ chunk_duration=1.0 # Process every 1 second
+ )
+
+ # Reset auto-final detection variables
+ self.interim_count = 0
+ self.last_interim_time = None
+ self.accumulated_transcript = ""
+ self.final_sent = False
+
+ # Start background transcription task
+ self.transcription_task = asyncio.create_task(self._process_audio_chunks())
+
+ # Start auto-final detection task
+ self.auto_final_task = asyncio.create_task(self._monitor_for_auto_final())
+
+ async def stop_processing(self):
+ """Stop processing and send final transcription"""
+ self.running = False
+
+ # Cancel background tasks
+ if self.transcription_task:
+ self.transcription_task.cancel()
+ try:
+ await self.transcription_task
+ except asyncio.CancelledError:
+ pass
+
+ if self.auto_final_task:
+ self.auto_final_task.cancel()
+ try:
+ await self.auto_final_task
+ except asyncio.CancelledError:
+ pass
+
+ # Send final transcription if not already sent
+ if not self.final_sent and self.accumulated_transcript.strip():
+ await self.send_transcription(self.accumulated_transcript, is_final=True)
+
+ # Also process any remaining audio for comprehensive final transcription
+ if self.audio_buffer:
+ all_audio = self.audio_buffer.get_all_audio()
+ if len(all_audio) > 0 and not self.final_sent:
+ loop = asyncio.get_event_loop()
+ final_transcription = await loop.run_in_executor(
+ executor,
+ transcribe_chunk_direct,
+ all_audio,
+ self.config["sample_rate"]
+ )
+
+ if final_transcription.strip():
+ # Send comprehensive final transcription
+ await self.send_transcription(final_transcription, is_final=True)
+
+ logger.info("STT session ended")
+
+ async def add_audio_data(self, audio_bytes):
+ """Add audio data to buffer"""
+ if self.audio_buffer:
+ audio_data = linear16_to_audio(audio_bytes, self.config["sample_rate"])
+ self.audio_buffer.add_audio(audio_data)
+
+ async def _process_audio_chunks(self):
+ """Process audio chunks for interim results"""
+ while self.running and self.config.get("interim_results", False):
+ try:
+ if self.audio_buffer and self.audio_buffer.has_chunk_ready():
+ chunk_signal = self.audio_buffer.get_chunk_for_processing()
+ if chunk_signal is not None:
+ # Get all accumulated audio so far for complete transcription
+ all_audio = self.audio_buffer.get_all_audio()
+
+ # Only process if we have actual speech content
+ if len(all_audio) > 0 and self.audio_buffer.is_speech(all_audio[-self.audio_buffer.chunk_samples:]):
+ # Run transcription on all accumulated audio
+ loop = asyncio.get_event_loop()
+ transcription = await loop.run_in_executor(
+ executor,
+ transcribe_chunk_direct,
+ all_audio,
+ self.config["sample_rate"]
+ )
+
+ if transcription.strip() and transcription != self.last_partial:
+ self.last_partial = transcription
+ self.accumulated_transcript = transcription # Update accumulated transcript
+ self.interim_count += 1
+ self.last_interim_time = time.time()
+
+ # Send interim result
+ await self.send_transcription(transcription, is_final=False)
+
+ logger.info(f"Interim #{self.interim_count}: '{transcription}'")
+
+ # Small delay to prevent excessive processing
+ await asyncio.sleep(0.1)
+
+ except Exception as e:
+ logger.error(f"Error in chunk processing: {e}")
+ await asyncio.sleep(1)
+
    async def _monitor_for_auto_final(self):
        """Background task: promote the latest interim hypothesis to a final.

        Fires once at least `self.min_interim_count` interim results exist
        and `self.silence_timeout` seconds have elapsed since the last one
        (speaker went quiet), provided no final has been sent yet.
        """
        while self.running:
            try:
                current_time = time.time()

                # All auto-final conditions: enough interims, a real silence
                # gap, no final sent yet, and a non-empty transcript to send.
                if (self.interim_count >= self.min_interim_count and
                    self.last_interim_time is not None and
                    (current_time - self.last_interim_time) >= self.silence_timeout and
                    not self.final_sent and
                    self.accumulated_transcript.strip()):

                    logger.info(f"Auto-final triggered: {self.interim_count} interim results, "
                               f"{current_time - self.last_interim_time:.1f}s silence")

                    # Send the accumulated transcript as final
                    await self.send_transcription(self.accumulated_transcript, is_final=True)
                    self.final_sent = True

                    # Reset counters for potential next utterance.
                    # NOTE(review): final_sent stays True after this, so a second
                    # utterance in the same session will not auto-finalize —
                    # confirm that is intended.
                    self.interim_count = 0
                    self.last_interim_time = None
                    self.accumulated_transcript = ""

                # Check every 0.5 seconds
                await asyncio.sleep(0.5)

            except Exception as e:
                logger.error(f"Error in auto-final monitoring: {e}")
                await asyncio.sleep(1)
+
+ # async def send_transcription(self, text, is_final=False, confidence=0.9):
+ # """Send transcription in jambonz format with Arabic number conversion"""
+ # try:
+ # # Convert Arabic numbers to digits before sending
+ # original_text = text
+ # converted_text = convert_arabic_numbers_in_sentence(text)
+
+ # # Log the conversion if numbers were found and converted
+ # if original_text != converted_text:
+ # logger.info(f"Arabic numbers converted: '{original_text}' -> '{converted_text}'")
+
+ # message = {
+ # "type": "transcription",
+ # "is_final": is_final,
+ # "alternatives": [
+ # {
+ # "transcript": converted_text,
+ # "confidence": confidence
+ # }
+ # ],
+ # "language": self.config.get("language", "ar-EG"),
+ # "channel": 1
+ # }
+
+ # await self.websocket.send(json.dumps(message))
+ # logger.info(f"Sent {'FINAL' if is_final else 'interim'} transcription: '{converted_text}'")
+
+ # if is_final:
+ # self.final_sent = True
+
+ # except Exception as e:
+ # logger.error(f"Error sending transcription: {e}")
+
+
+
    async def send_transcription(self, text, is_final=False, confidence=0.9):
        """Send a final transcription to jambonz; interim results are dropped.

        The background tasks still call this with is_final=False, but only
        finals reach the client. Arabic number words are converted to digits
        before sending.

        Args:
            text: Transcript text to deliver.
            is_final: Only True triggers a send; False is a logged no-op.
            confidence: Confidence value reported to jambonz.
        """
        try:
            if not is_final:
                # Do nothing for interim results
                logger.debug("Skipping interim transcription (not final).")
                return

            # Convert Arabic numbers only for final transcripts
            original_text = text
            converted_text = convert_arabic_numbers_in_sentence(text)

            # Log the conversion if numbers were found and converted
            if original_text != converted_text:
                logger.info(f"Arabic numbers converted: '{original_text}' -> '{converted_text}'")

            # jambonz custom-STT transcription payload.
            message = {
                "type": "transcription",
                "is_final": True,
                "alternatives": [
                    {
                        "transcript": converted_text,
                        "confidence": confidence
                    }
                ],
                "language": self.config.get("language", "ar-EG"),
                "channel": 1
            }

            # Send only final messages
            await self.websocket.send(json.dumps(message))
            logger.info(f"Sent FINAL transcription: '{converted_text}'")

            self.final_sent = True

        except Exception as e:
            logger.error(f"Error sending transcription: {e}")
+
+
+
+
+ async def send_error(self, error_message):
+ """Send error message in jambonz format"""
+ try:
+ message = {
+ "type": "error",
+ "error": error_message
+ }
+ await self.websocket.send(json.dumps(message))
+ logger.error(f"Sent error: {error_message}")
+ except Exception as e:
+ logger.error(f"Error sending error message: {e}")
+
async def handle_jambonz_websocket(websocket):
    """Serve one jambonz STT WebSocket connection.

    Text frames are JSON control messages ("start" / "stop"); binary frames
    are LINEAR16 PCM audio. A JambonzSTTHandler owns the per-session state
    and is always stopped in the finally block so the final transcription is
    flushed even on abnormal disconnects.
    """
    client_id = f"jambonz_{id(websocket)}"
    logger.info(f"New jambonz connection: {client_id}")

    handler = JambonzSTTHandler(websocket)

    try:
        async for message in websocket:
            try:
                if isinstance(message, str):
                    # Handle JSON control messages
                    data = json.loads(message)
                    message_type = data.get("type")

                    if message_type == "start":
                        logger.info(f"Received start message: {data}")
                        await handler.start_processing(data)

                    elif message_type == "stop":
                        logger.info("Received stop message")
                        await handler.stop_processing()
                        # Close websocket after final transcription
                        await websocket.close(code=1000, reason="Session completed")
                        break

                    else:
                        logger.warning(f"Unknown message type: {message_type}")
                        await handler.send_error(f"Unknown message type: {message_type}")

                else:
                    # Handle binary audio data (LINEAR16 PCM); audio before
                    # the "start" message is rejected, not buffered.
                    if handler.audio_buffer is None:
                        await handler.send_error("Received audio before start message")
                        continue

                    await handler.add_audio_data(message)

            except json.JSONDecodeError as e:
                logger.error(f"JSON decode error: {e}")
                await handler.send_error(f"Invalid JSON: {str(e)}")
            except Exception as e:
                logger.error(f"Error processing message: {e}")
                await handler.send_error(f"Processing error: {str(e)}")

    except websockets.exceptions.ConnectionClosed:
        logger.info(f"jambonz connection closed: {client_id}")
    except Exception as e:
        logger.error(f"jambonz WebSocket error: {e}")
        try:
            await handler.send_error(str(e))
        except Exception:
            # Fix: was a bare `except:` which also swallowed KeyboardInterrupt
            # and CancelledError. The socket is already unusable here; nothing
            # more we can do beyond giving up on the error report.
            pass
    finally:
        # Guarantee the session is torn down (and a final flushed) on any exit.
        if handler.running:
            await handler.stop_processing()
        logger.info(f"jambonz connection ended: {client_id}")
+
async def main():
    """Run the jambonz STT WebSocket server until it is shut down."""
    logger.info("Starting Jambonz Custom STT WebSocket server on port 3006...")

    server = await websockets.serve(
        handle_jambonz_websocket,
        "0.0.0.0",
        3006,
        ping_interval=20,
        ping_timeout=10,
        close_timeout=10
    )

    # Startup summary for the operator.
    startup_notes = (
        "Jambonz Custom STT WebSocket server started on ws://0.0.0.0:3006",
        "Ready to handle jambonz STT requests",
        "- Expects LINEAR16 PCM audio at 8kHz",
        "- Supports interim results with auto-final detection",
        "- Auto-final: 3+ interim results + 1.3s silence",
        "- Resamples to 16kHz for Whisper processing",
        "- Converts Arabic numbers to digits before sending",
    )
    for note in startup_notes:
        logger.info(note)

    # Block until the server socket is closed.
    await server.wait_closed()
+
+if __name__ == "__main__":
+ print("=" * 60)
+ print("Jambonz Custom STT Server with Whisper + Arabic Numbers")
+ print("=" * 60)
+ print(f"Model: {MODEL_NAME}")
+ print(f"Device: {device}")
+ print("WebSocket Port: 3006")
+ print("Protocol: jambonz STT API")
+ print("Audio Format: LINEAR16 PCM @ 8kHz")
+ print("Auto-Final: 2+ speech activities + 1.3s silence")
+ print("Arabic Numbers: Converted to digits in FINAL transcriptions only")
+ print("Interim Results: DISABLED (final transcription only)")
+ if arabic_numbers_available:
+ print("✓ pyarabic library available for number conversion")
+ else:
+ print("✗ pyarabic library not available - install with: pip install pyarabic")
+ print("=" * 60)
+
+ try:
+ asyncio.run(main())
+ except KeyboardInterrupt:
+ print("\nShutting down server...")
+ except Exception as e:
+ print(f"Server error: {e}")
diff --git a/aqib-whipser_ft-arabic_denoiser_meta.py b/aqib-whipser_ft-arabic_denoiser_meta.py
new file mode 100644
index 0000000000000000000000000000000000000000..9aa5f8e09bc740180dffa0f982344d9e33c95357
--- /dev/null
+++ b/aqib-whipser_ft-arabic_denoiser_meta.py
@@ -0,0 +1,787 @@
+# import torch
+# import asyncio
+# import websockets
+# import json
+# import threading
+# import numpy as np
+# from transformers import AutoModelForSpeechSeq2Seq, AutoProcessor, WhisperTokenizer, pipeline , WhisperForConditionalGeneration, WhisperProcessor
+# import subprocess
+# import logging
+# import time
+# from concurrent.futures import ThreadPoolExecutor
+# import struct
+# import re
+# 3 - 10 - 2025
+import torch
+import asyncio
+import websockets
+import json
+import threading
+import numpy as np
+from transformers import AutoModelForSpeechSeq2Seq, AutoProcessor, WhisperTokenizer
+import subprocess
+import logging
+import time
+from concurrent.futures import ThreadPoolExecutor
+import re
+
+# --- Denoiser added ---
+try:
+ import noisereduce as nr
+ denoiser_available = True
+ print("Denoiser available (using noisereduce)")
+except ImportError:
+ denoiser_available = False
+ print("noisereduce not available - install with: pip install noisereduce")
+##############################################################################################
+# Arabic number conversion imports
+try:
+ from pyarabic.number import text2number
+ arabic_numbers_available = True
+ print("Arabic number conversion available")
+except ImportError:
+ arabic_numbers_available = False
+ print("pyarabic not available - install with: pip install pyarabic")
+ print("Arabic numbers will not be converted to digits")
+
+# Set up logging
+logging.basicConfig(level=logging.INFO)
+logger = logging.getLogger(__name__)
+# 3 - 10 - 2025
+# def denoise_audio(audio_data, sample_rate=16000):
+# """Apply noise reduction to audio using noisereduce."""
+# if not denoiser_available or len(audio_data) == 0:
+# return audio_data
+# try:
+# reduced = nr.reduce_noise(y=audio_data, sr=sample_rate)
+# return reduced.astype(np.float32)
+# except Exception as e:
+# logger.warning(f"Denoiser failed: {e}")
+# return audio_data
+#############################################################################################
def convert_arabic_numbers_in_sentence(sentence: str) -> str:
    """
    Replace Arabic number words in a sentence with digits, preserving all
    other words, whitespace and punctuation.

    The input is first normalized so common spelling/dialect variants map to
    forms pyarabic's text2number understands, then each whitespace-delimited
    token is converted independently. Zero ("صفر") is accepted explicitly
    because text2number returns 0 both for it and for unrecognized words.

    Tokens that fail to convert (including when pyarabic is missing) are
    left unchanged; on unexpected errors the original sentence is returned.
    """
    try:
        # --- Normalization: map dialect/orthographic variants to canonical forms ---
        replacements = {
            "اربعة": "أربعة",
            "اربع": "أربع",
            "اثنين": "اثنان",
            "اتنين": "اثنان",  # Egyptian variant
            "ثلاث": "ثلاثة",
            "خمس": "خمسة",
            "ست": "ستة",
            "سبع": "سبعة",
            "ثمان": "ثمانية",
            "تسع": "تسعة",
            "عشر": "عشرة",
        }
        for wrong, correct in replacements.items():
            sentence = re.sub(rf"\b{wrong}\b", correct, sentence)

        # --- Split by whitespace but keep the separators so spacing survives ---
        words = re.split(r'(\s+)', sentence)
        converted_words = []

        for word in words:
            stripped = word.strip()
            if not stripped:  # whitespace separator (or empty edge token)
                converted_words.append(word)
                continue

            try:
                num = text2number(stripped)

                # text2number yields 0 for unrecognized words, so only trust a
                # zero result when the token is literally "صفر".
                if isinstance(num, int) and (num != 0 or stripped == "صفر"):
                    converted_words.append(str(num))
                else:
                    converted_words.append(word)

            except Exception:
                # Unconvertible token (or pyarabic unavailable): keep it as-is.
                converted_words.append(word)

        return ''.join(converted_words)

    except Exception as e:
        logger.warning(f"Error converting Arabic numbers: {e}")
        return sentence
+
+
# Try to install flash-attn if not available.
# NOTE(review): this runs `pip install` as an import-time side effect via
# shell=True; the freshly installed flash-attn is NOT re-imported afterwards,
# so use_flash_attn stays False for the current process either way. Also
# confirm why `pip install websockets` is bundled into the flash-attn
# fallback — it looks unrelated.
try:
    import flash_attn
    use_flash_attn = True
except ImportError:
    print("Flash attention not available, using standard attention")
    use_flash_attn = False
    try:
        subprocess.run(
            "pip install websockets",
            shell=True,
            check=False
        )
        subprocess.run(
            "pip install flash-attn --no-build-isolation",
            shell=True,
            check=False
        )
    except:
        # NOTE(review): bare except silences everything, including
        # KeyboardInterrupt — consider `except Exception` in a follow-up.
        pass
+
+device = "cuda" if torch.cuda.is_available() else "cpu"
+# --- Facebook Denoiser added ---
+try:
+ import torchaudio
+ from denoiser import pretrained
+ # Load DNS64 pretrained model (auto-downloads if not cached)
+ denoiser_model = pretrained.dns64().to(device)
+ denoiser_model.eval()
+ denoiser_available = True
+ print("facebook/denoiser loaded successfully")
+except ImportError as e:
+ denoiser_available = False
+ print("facebook/denoiser not available - install with: pip install denoiser torchaudio")
+ denoiser_model = None
+
+
+torch_dtype = torch.float16 if torch.cuda.is_available() else torch.float32
+MODEL_NAME = "alaatiger989/FT_Arabic_Whisper_V1_1"#"openai/whisper-large-v3-turbo"
+
+print(f"Using device: {device}")
+print(f"CUDA available: {torch.cuda.is_available()}")
+if torch.cuda.is_available():
+ print(f"GPU: {torch.cuda.get_device_name(0)}")
+
# Model initialization with fallback for attention implementation:
# prefer flash-attention 2 on GPU; if loading fails for any reason, retry
# with the default attention implementation.
try:
    if use_flash_attn and torch.cuda.is_available():
        model = AutoModelForSpeechSeq2Seq.from_pretrained(
            MODEL_NAME,
            torch_dtype=torch_dtype,
            low_cpu_mem_usage=True,
            use_safetensors=True,
            attn_implementation="flash_attention_2"
        )
    else:
        model = AutoModelForSpeechSeq2Seq.from_pretrained(
            MODEL_NAME,
            torch_dtype=torch_dtype,
            low_cpu_mem_usage=True,
            use_safetensors=True
        )
except Exception as e:
    print(f"Error loading model with flash attention: {e}")
    model = AutoModelForSpeechSeq2Seq.from_pretrained(
        MODEL_NAME,
        torch_dtype=torch_dtype,
        low_cpu_mem_usage=True,
        use_safetensors=True
    )

model.to(device)

# Feature extractor and tokenizer come from the same checkpoint as the model.
processor = AutoProcessor.from_pretrained(MODEL_NAME)
tokenizer = WhisperTokenizer.from_pretrained(MODEL_NAME)
def denoise_audio(audio_data, sample_rate=16000):
    """Apply denoising using the facebook/denoiser (DNS64) pretrained model.

    Args:
        audio_data: 1-D float waveform (numpy array) to denoise.
        sample_rate: Rate of audio_data. Kept for interface compatibility;
            the model itself takes no rate argument. DNS64 presumably expects
            16 kHz input — callers resample first; TODO confirm.

    Returns:
        The denoised waveform as float32, or the input unchanged when the
        model is unavailable, the input is empty, or inference fails.
    """
    if denoiser_model is None or len(audio_data) == 0:
        return audio_data
    try:
        # Fix: the model's forward takes only the waveform tensor. Passing
        # sample_rate= raised TypeError on every call, so the except branch
        # always returned the audio un-denoised.
        # NOTE(review): input is shaped (1, time) via unsqueeze(0) — confirm
        # the model does not require an explicit channel dimension.
        audio_tensor = torch.tensor(audio_data, dtype=torch.float32, device=device).unsqueeze(0)
        with torch.no_grad():
            denoised_tensor = denoiser_model(audio_tensor)[0]
        return denoised_tensor.squeeze().cpu().numpy().astype("float32")
    except Exception as e:
        print(f"[WARN] Denoiser failed: {e}")
        return audio_data
+# def denoise_audio(audio_data, sample_rate=16000):
+# """Apply denoising using facebook/denoiser pretrained model."""
+# if not denoiser_available or denoiser_model is None or len(audio_data) == 0:
+# return audio_data
+# try:
+# # Convert numpy -> torch tensor
+# audio_tensor = torch.tensor(audio_data, dtype=torch.float32, device=device).unsqueeze(0)
+# with torch.no_grad():
+# denoised_tensor = denoiser_model(audio_tensor)[0]
+# # Back to numpy
+# denoised_audio = denoised_tensor.squeeze().cpu().numpy().astype(np.float32)
+# return denoised_audio
+# except Exception as e:
+# logger.warning(f"Denoiser failed: {e}")
+# return audio_data
# Thread pool for audio processing: transcription runs here via
# loop.run_in_executor so the asyncio event loop never blocks on inference.
executor = ThreadPoolExecutor(max_workers=4)
+
class JambonzAudioBuffer:
    """Thread-safe accumulator for PCM audio streamed from jambonz.

    Two parallel stores are kept: `buffer` (used only to decide when a
    chunk's worth of new audio has arrived) and `total_audio` (everything
    received, used for full-utterance transcription). A simple energy
    threshold serves as voice-activity detection.
    """

    def __init__(self, sample_rate=8000, chunk_duration=1.0):
        self.sample_rate = sample_rate
        self.chunk_duration = chunk_duration
        self.chunk_samples = int(chunk_duration * sample_rate)

        self.buffer = np.array([], dtype=np.float32)
        self.lock = threading.Lock()
        self.total_audio = np.array([], dtype=np.float32)

        # Energy-based VAD parameters: mean |amplitude| above the threshold
        # over at least 300 ms of audio counts as speech.
        self.silence_threshold = 0.01
        self.min_speech_samples = int(0.3 * sample_rate)

    def add_audio(self, audio_data):
        """Append new samples to both the chunk buffer and the full recording."""
        with self.lock:
            self.buffer = np.concatenate([self.buffer, audio_data])
            self.total_audio = np.concatenate([self.total_audio, audio_data])

    def has_chunk_ready(self):
        """Return True once at least one full chunk of audio has accumulated."""
        with self.lock:
            return len(self.buffer) >= self.chunk_samples

    def is_speech(self, audio_chunk):
        """Energy-based VAD: True when the chunk is long and loud enough."""
        if len(audio_chunk) < self.min_speech_samples:
            return False
        return np.mean(np.abs(audio_chunk)) > self.silence_threshold

    def get_chunk_for_processing(self):
        """Signal (without consuming audio) that a chunk is ready.

        Interim transcription always re-reads the whole recording, so this
        never drains `buffer`; it only reports readiness. Returns a dummy
        one-element array when ready, else None.
        """
        with self.lock:
            if len(self.buffer) < self.chunk_samples:
                return None
            return np.array([1])

    def get_all_audio(self):
        """Return a copy of every sample received so far."""
        with self.lock:
            return self.total_audio.copy()

    def clear(self):
        """Drop all buffered audio."""
        with self.lock:
            self.buffer = np.array([], dtype=np.float32)
            self.total_audio = np.array([], dtype=np.float32)
+
def linear16_to_audio(audio_bytes, sample_rate=8000):
    """Decode LINEAR16 PCM bytes (the jambonz wire format) into float32.

    Returns samples normalized to [-1.0, 1.0); on decode failure an empty
    array is returned so callers can treat it as "no audio".
    """
    try:
        samples = np.frombuffer(audio_bytes, dtype=np.int16)
        # Normalize 16-bit signed integers into the float range Whisper expects.
        return samples.astype(np.float32) / 32768.0
    except Exception as exc:
        logger.error(f"Error converting LINEAR16 to audio: {exc}")
        return np.array([], dtype=np.float32)
+
def resample_audio(audio_data, source_rate, target_rate):
    """Resample a waveform via linear interpolation (e.g. 8 kHz -> 16 kHz).

    Returns the input object untouched when the rates already match;
    otherwise a float32 array interpolated at fractional source positions.
    """
    if source_rate == target_rate:
        return audio_data

    # Source samples per target sample (0.5 when upsampling 8k -> 16k).
    step = source_rate / target_rate
    positions = np.arange(0, len(audio_data), step)
    positions = positions[positions < len(audio_data)]
    resampled = np.interp(positions, np.arange(len(audio_data)), audio_data)

    # Keep a consistent float32 dtype for downstream processing.
    return resampled.astype(np.float32)
def transcribe_chunk_direct(audio_data, source_sample_rate=8000, target_sample_rate=16000):
    """Transcribe audio chunk using model's generate method directly.

    Pipeline: resample (8 kHz telephone audio -> 16 kHz for Whisper),
    denoise, extract features via the processor, then greedy-decode Arabic
    with the globally loaded model/tokenizer. Returns "" for empty or
    too-short input and on any error. Blocking — intended to run inside the
    thread-pool executor.
    """
    try:
        if len(audio_data) == 0:
            return ""

        # Resample from 8kHz to 16kHz for Whisper
        resampled_audio = resample_audio(audio_data, source_sample_rate, target_sample_rate)

        # --- Denoiser added ---
        resampled_audio = denoise_audio(resampled_audio, sample_rate=target_sample_rate)

        # Ensure minimum length for Whisper
        min_samples = int(0.1 * target_sample_rate) # 100ms minimum
        if len(resampled_audio) < min_samples:
            return ""

        start_time = time.time()

        # Prepare input features for the encoder.
        input_features = processor(
            resampled_audio,
            sampling_rate=target_sample_rate,
            return_tensors="pt"
        ).input_features

        # Match the model's device and (possibly fp16) dtype.
        input_features = input_features.to(device=device, dtype=torch_dtype)

        # NOTE(review): this mask has shape input_features.shape[:-1], i.e.
        # (batch, n_mels), not (batch, frames) — confirm this is what
        # generate() expects for Whisper feature inputs.
        attention_mask = torch.ones(
            input_features.shape[:-1],
            dtype=torch.long,
            device=device
        )

        with torch.no_grad():
            # Greedy decoding (num_beams=1, no sampling), forced to Arabic
            # transcription.
            # NOTE(review): temperature=0.0 alongside do_sample=False is
            # redundant and recent transformers versions warn about it.
            predicted_ids = model.generate(
                input_features,
                attention_mask=attention_mask,
                max_new_tokens=128,
                do_sample=False,
                temperature=0.0,
                num_beams=1,
                language="ar",
                task="transcribe",
                pad_token_id=tokenizer.pad_token_id,
                eos_token_id=tokenizer.eos_token_id
            )

        transcription = tokenizer.batch_decode(
            predicted_ids,
            skip_special_tokens=True
        )[0].strip()

        end_time = time.time()

        logger.info(f"Direct transcription completed in {end_time - start_time:.2f}s: '{transcription}'")
        return transcription

    except Exception as e:
        logger.error(f"Error during direct transcription: {e}")
        return ""
+# def transcribe_chunk_direct(audio_data, source_sample_rate=8000, target_sample_rate=16000):
+# """Transcribe audio chunk using model's generate method directly"""
+# try:
+# if len(audio_data) == 0:
+# return ""
+
+# # Resample from 8kHz to 16kHz for Whisper
+# resampled_audio = resample_audio(audio_data, source_sample_rate, target_sample_rate)
+
+# # Ensure minimum length for Whisper
+# min_samples = int(0.1 * target_sample_rate) # 100ms minimum
+# if len(resampled_audio) < min_samples:
+# return ""
+
+# start_time = time.time()
+
+# # Prepare input features with proper dtype
+# input_features = processor(
+# resampled_audio,
+# sampling_rate=target_sample_rate,
+# return_tensors="pt"
+# ).input_features
+
+# # Ensure correct dtype and device
+# input_features = input_features.to(device=device, dtype=torch_dtype)
+
+# # Create attention mask to avoid warnings
+# attention_mask = torch.ones(
+# input_features.shape[:-1],
+# dtype=torch.long,
+# device=device
+# )
+
+# # Generate transcription using model directly
+# with torch.no_grad():
+# predicted_ids = model.generate(
+# input_features,
+# attention_mask=attention_mask,
+# max_new_tokens=128,
+# do_sample=False,
+# temperature=0.0,
+# num_beams=1,
+# language="ar",
+# task="transcribe",
+# pad_token_id=tokenizer.pad_token_id,
+# eos_token_id=tokenizer.eos_token_id
+# )
+
+# # Decode the transcription
+# transcription = tokenizer.batch_decode(
+# predicted_ids,
+# skip_special_tokens=True
+# )[0].strip()
+
+# end_time = time.time()
+
+# logger.info(f"Direct transcription completed in {end_time - start_time:.2f}s: '{transcription}'")
+# return transcription
+
+# except Exception as e:
+# logger.error(f"Error during direct transcription: {e}")
+# return ""
+
class JambonzSTTHandler:
    """Per-connection state machine implementing the jambonz custom-STT protocol.

    Lifecycle: start_processing() (on the "start" control message) configures
    the session and launches two background tasks; add_audio_data() feeds
    incoming PCM; stop_processing() (on "stop" or disconnect) cancels the
    tasks and flushes a final transcription. Interim hypotheses are tracked
    internally to drive auto-final detection, but only FINAL transcriptions
    are delivered to the client.
    """

    def __init__(self, websocket):
        self.websocket = websocket
        self.audio_buffer = None           # created on "start"
        self.config = {}
        self.running = True
        self.transcription_task = None     # interim-processing background task
        self.full_transcript = ""
        self.last_partial = ""

        # Auto-final detection: once min_interim_count interim results exist
        # and silence_timeout seconds pass with no new interim, the
        # accumulated transcript is promoted to a final result.
        self.interim_count = 0
        self.last_interim_time = None
        self.silence_timeout = 1.5         # seconds of silence before auto-final
        self.min_interim_count = 1         # interim results required first
        self.auto_final_task = None
        self.accumulated_transcript = ""
        self.final_sent = False

    async def start_processing(self, start_message):
        """Initialize the session from jambonz's "start" control message.

        Records the negotiated audio format, resets per-utterance state and
        launches the interim-transcription and auto-final monitor tasks.
        """
        self.config = {
            "language": start_message.get("language", "ar-EG"),
            "format": start_message.get("format", "raw"),
            "encoding": start_message.get("encoding", "LINEAR16"),
            "sample_rate": start_message.get("sampleRateHz", 8000),
            "interim_results": start_message.get("interimResults", True),
            "options": start_message.get("options", {})
        }

        logger.info(f"STT session started with config: {self.config}")

        # Buffer incoming PCM; readiness is checked per second of audio.
        self.audio_buffer = JambonzAudioBuffer(
            sample_rate=self.config["sample_rate"],
            chunk_duration=1.0
        )

        # Reset auto-final detection state for a fresh utterance.
        self.interim_count = 0
        self.last_interim_time = None
        self.accumulated_transcript = ""
        self.final_sent = False

        # Background tasks: interim transcription + silence monitoring.
        self.transcription_task = asyncio.create_task(self._process_audio_chunks())
        self.auto_final_task = asyncio.create_task(self._monitor_for_auto_final())

    async def stop_processing(self):
        """Tear down the session, flushing a final transcription if needed."""
        self.running = False

        # Cancel background tasks and wait for them to unwind.
        if self.transcription_task:
            self.transcription_task.cancel()
            try:
                await self.transcription_task
            except asyncio.CancelledError:
                pass

        if self.auto_final_task:
            self.auto_final_task.cancel()
            try:
                await self.auto_final_task
            except asyncio.CancelledError:
                pass

        # Flush the best interim hypothesis as final if none was sent yet.
        if not self.final_sent and self.accumulated_transcript.strip():
            await self.send_transcription(self.accumulated_transcript, is_final=True)

        # Otherwise transcribe everything received for a comprehensive final.
        # (send_transcription sets final_sent, so this cannot double-send.)
        if self.audio_buffer:
            all_audio = self.audio_buffer.get_all_audio()
            if len(all_audio) > 0 and not self.final_sent:
                loop = asyncio.get_event_loop()
                final_transcription = await loop.run_in_executor(
                    executor,
                    transcribe_chunk_direct,
                    all_audio,
                    self.config["sample_rate"]
                )

                if final_transcription.strip():
                    await self.send_transcription(final_transcription, is_final=True)

        logger.info("STT session ended")

    async def add_audio_data(self, audio_bytes):
        """Decode raw LINEAR16 bytes and append them to the session buffer."""
        if self.audio_buffer:
            audio_data = linear16_to_audio(audio_bytes, self.config["sample_rate"])
            self.audio_buffer.add_audio(audio_data)

    async def _process_audio_chunks(self):
        """Background task: produce interim transcriptions while audio arrives.

        Each pass re-transcribes ALL audio received so far, so every interim
        is a full-utterance hypothesis superseding the previous one.
        """
        while self.running and self.config.get("interim_results", False):
            try:
                if self.audio_buffer and self.audio_buffer.has_chunk_ready():
                    chunk_signal = self.audio_buffer.get_chunk_for_processing()
                    if chunk_signal is not None:
                        all_audio = self.audio_buffer.get_all_audio()

                        # Only transcribe when the most recent chunk contains
                        # speech (energy VAD), to skip re-running on silence.
                        if len(all_audio) > 0 and self.audio_buffer.is_speech(all_audio[-self.audio_buffer.chunk_samples:]):
                            # Run inference in the thread pool so the event
                            # loop stays responsive.
                            loop = asyncio.get_event_loop()
                            transcription = await loop.run_in_executor(
                                executor,
                                transcribe_chunk_direct,
                                all_audio,
                                self.config["sample_rate"]
                            )

                            if transcription.strip() and transcription != self.last_partial:
                                self.last_partial = transcription
                                self.accumulated_transcript = transcription  # latest hypothesis
                                self.interim_count += 1
                                self.last_interim_time = time.time()

                                # Interim send is a no-op downstream (only
                                # finals are forwarded), but keeps the flow.
                                await self.send_transcription(transcription, is_final=False)

                                logger.info(f"Interim #{self.interim_count}: '{transcription}'")

                # Small delay to prevent excessive processing.
                await asyncio.sleep(0.1)

            except Exception as e:
                logger.error(f"Error in chunk processing: {e}")
                await asyncio.sleep(1)

    async def _monitor_for_auto_final(self):
        """Background task: promote the latest interim hypothesis to a final.

        Fires once at least `min_interim_count` interim results exist and
        `silence_timeout` seconds have elapsed since the last one, provided
        no final has been sent yet.
        """
        while self.running:
            try:
                current_time = time.time()

                # All auto-final conditions: enough interims, a real silence
                # gap, no final sent yet, and something to send.
                if (self.interim_count >= self.min_interim_count and
                    self.last_interim_time is not None and
                    (current_time - self.last_interim_time) >= self.silence_timeout and
                    not self.final_sent and
                    self.accumulated_transcript.strip()):

                    logger.info(f"Auto-final triggered: {self.interim_count} interim results, "
                               f"{current_time - self.last_interim_time:.1f}s silence")

                    # Send the accumulated transcript as final.
                    await self.send_transcription(self.accumulated_transcript, is_final=True)
                    self.final_sent = True

                    # Reset counters for a potential next utterance.
                    self.interim_count = 0
                    self.last_interim_time = None
                    self.accumulated_transcript = ""

                # Poll every 0.5 seconds.
                await asyncio.sleep(0.5)

            except Exception as e:
                logger.error(f"Error in auto-final monitoring: {e}")
                await asyncio.sleep(1)

    async def send_transcription(self, text, is_final=False, confidence=0.9):
        """Send a final transcription to jambonz; interim results are dropped.

        Arabic number words are converted to digits before sending, matching
        the behavior advertised in the startup banner ("Converted to digits
        in FINAL transcriptions only").
        """
        try:
            if not is_final:
                # Interim results are intentionally suppressed.
                logger.debug("Skipping interim transcription (not final).")
                return

            # Convert Arabic numbers only for final transcripts.
            original_text = text
            converted_text = convert_arabic_numbers_in_sentence(text)

            if original_text != converted_text:
                logger.info(f"Arabic numbers converted: '{original_text}' -> '{converted_text}'")

            message = {
                "type": "transcription",
                "is_final": True,
                "alternatives": [
                    {
                        # Fix: previously sent original_text while logging
                        # converted_text below — send what we log.
                        "transcript": converted_text,
                        "confidence": confidence
                    }
                ],
                "language": self.config.get("language", "ar-EG"),
                "channel": 1
            }

            await self.websocket.send(json.dumps(message))
            logger.info(f"Sent FINAL transcription: '{converted_text}'")

            self.final_sent = True

        except Exception as e:
            logger.error(f"Error sending transcription: {e}")

    async def send_error(self, error_message):
        """Report an error to the jambonz client as a typed JSON message."""
        try:
            message = {
                "type": "error",
                "error": error_message
            }
            await self.websocket.send(json.dumps(message))
            logger.error(f"Sent error: {error_message}")
        except Exception as e:
            logger.error(f"Error sending error message: {e}")
+
async def handle_jambonz_websocket(websocket):
    """Serve one jambonz STT WebSocket connection.

    Text frames are JSON control messages ("start" / "stop"); binary frames
    are LINEAR16 PCM audio. A JambonzSTTHandler owns the per-session state
    and is always stopped in the finally block so the final transcription is
    flushed even on abnormal disconnects.
    """
    client_id = f"jambonz_{id(websocket)}"
    logger.info(f"New jambonz connection: {client_id}")

    handler = JambonzSTTHandler(websocket)

    try:
        async for message in websocket:
            try:
                if isinstance(message, str):
                    # Handle JSON control messages
                    data = json.loads(message)
                    message_type = data.get("type")

                    if message_type == "start":
                        logger.info(f"Received start message: {data}")
                        await handler.start_processing(data)

                    elif message_type == "stop":
                        logger.info("Received stop message")
                        await handler.stop_processing()
                        # Close websocket after final transcription
                        await websocket.close(code=1000, reason="Session completed")
                        break

                    else:
                        logger.warning(f"Unknown message type: {message_type}")
                        await handler.send_error(f"Unknown message type: {message_type}")

                else:
                    # Handle binary audio data (LINEAR16 PCM); audio before
                    # the "start" message is rejected, not buffered.
                    if handler.audio_buffer is None:
                        await handler.send_error("Received audio before start message")
                        continue

                    await handler.add_audio_data(message)

            except json.JSONDecodeError as e:
                logger.error(f"JSON decode error: {e}")
                await handler.send_error(f"Invalid JSON: {str(e)}")
            except Exception as e:
                logger.error(f"Error processing message: {e}")
                await handler.send_error(f"Processing error: {str(e)}")

    except websockets.exceptions.ConnectionClosed:
        logger.info(f"jambonz connection closed: {client_id}")
    except Exception as e:
        logger.error(f"jambonz WebSocket error: {e}")
        try:
            await handler.send_error(str(e))
        except Exception:
            # Fix: was a bare `except:` which also swallowed KeyboardInterrupt
            # and CancelledError. The socket is already unusable here; nothing
            # more we can do beyond giving up on the error report.
            pass
    finally:
        # Guarantee the session is torn down (and a final flushed) on any exit.
        if handler.running:
            await handler.stop_processing()
        logger.info(f"jambonz connection ended: {client_id}")
+
async def main():
    """Run the jambonz STT WebSocket server until it is shut down."""
    logger.info("Starting Jambonz Custom STT WebSocket server on port 3006...")

    server = await websockets.serve(
        handle_jambonz_websocket,
        "0.0.0.0",
        3006,
        ping_interval=20,
        ping_timeout=10,
        close_timeout=10
    )

    # Startup summary for the operator.
    startup_notes = (
        "Jambonz Custom STT WebSocket server started on ws://0.0.0.0:3006",
        "Ready to handle jambonz STT requests",
        "- Expects LINEAR16 PCM audio at 8kHz",
        "- Supports interim results with auto-final detection",
        "- Auto-final: 3+ interim results + 1.3s silence",
        "- Resamples to 16kHz for Whisper processing",
        "- Converts Arabic numbers to digits before sending",
    )
    for note in startup_notes:
        logger.info(note)

    # Block until the server socket is closed.
    await server.wait_closed()
+
+if __name__ == "__main__":
+ print("=" * 60)
+ print("Jambonz Custom STT Server with Whisper + Arabic Numbers")
+ print("=" * 60)
+ print(f"Model: {MODEL_NAME}")
+ print(f"Device: {device}")
+ print("WebSocket Port: 3006")
+ print("Protocol: jambonz STT API")
+ print("Audio Format: LINEAR16 PCM @ 8kHz")
+ print("Auto-Final: 2+ speech activities + 1.3s silence")
+ print("Arabic Numbers: Converted to digits in FINAL transcriptions only")
+ print("Interim Results: DISABLED (final transcription only)")
+ if arabic_numbers_available:
+ print("✓ pyarabic library available for number conversion")
+ else:
+ print("✗ pyarabic library not available - install with: pip install pyarabic")
+ print("=" * 60)
+
+ try:
+ asyncio.run(main())
+ except KeyboardInterrupt:
+ print("\nShutting down server...")
+ except Exception as e:
+ print(f"Server error: {e}")
diff --git a/aqib-whipser_ft-arabic_noise_reducer.py b/aqib-whipser_ft-arabic_noise_reducer.py
new file mode 100644
index 0000000000000000000000000000000000000000..bf887f0303c8d32f8255b90789a9c8a4fb0e2061
--- /dev/null
+++ b/aqib-whipser_ft-arabic_noise_reducer.py
@@ -0,0 +1,746 @@
+# import torch
+# import asyncio
+# import websockets
+# import json
+# import threading
+# import numpy as np
+# from transformers import AutoModelForSpeechSeq2Seq, AutoProcessor, WhisperTokenizer, pipeline , WhisperForConditionalGeneration, WhisperProcessor
+# import subprocess
+# import logging
+# import time
+# from concurrent.futures import ThreadPoolExecutor
+# import struct
+# import re
+# 3 - 10 - 2025
+import torch
+import asyncio
+import websockets
+import json
+import threading
+import numpy as np
+from transformers import AutoModelForSpeechSeq2Seq, AutoProcessor, WhisperTokenizer
+import subprocess
+import logging
+import time
+from concurrent.futures import ThreadPoolExecutor
+import re
+
# --- Denoiser added ---
# Optional dependency probe: noise reduction runs only if noisereduce
# imports cleanly; otherwise denoise_audio() becomes a passthrough.
try:
    import noisereduce as nr
    denoiser_available = True
    print("Denoiser available (using noisereduce)")
except ImportError:
    denoiser_available = False
    print("noisereduce not available - install with: pip install noisereduce")
##############################################################################################
# Arabic number conversion imports
# Optional dependency probe: pyarabic's text2number powers word->digit
# conversion; without it each per-word conversion fails and the word is
# kept verbatim (spelling normalization still applies).
try:
    from pyarabic.number import text2number
    arabic_numbers_available = True
    print("Arabic number conversion available")
except ImportError:
    arabic_numbers_available = False
    print("pyarabic not available - install with: pip install pyarabic")
    print("Arabic numbers will not be converted to digits")
+
# Set up logging: module-level logger shared by every handler below.
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
+# 3 - 10 - 2025
def denoise_audio(audio_data, sample_rate=16000):
    """Run noisereduce over the signal; return the input untouched on any failure.

    Passthrough when the optional denoiser is not installed or the input is empty.
    """
    if not denoiser_available or len(audio_data) == 0:
        return audio_data
    try:
        cleaned = nr.reduce_noise(y=audio_data, sr=sample_rate)
        # Keep downstream dtype consistent with the rest of the pipeline.
        return cleaned.astype(np.float32)
    except Exception as e:
        logger.warning(f"Denoiser failed: {e}")
        return audio_data
+#############################################################################################
def convert_arabic_numbers_in_sentence(sentence: str) -> str:
    """
    Replace Arabic number words in a sentence with digits,
    preserving all other words, spacing and punctuation.

    Spelling variants are first normalized (e.g. Egyptian "اتنين" ->
    "اثنان") so pyarabic's text2number recognizes them.  Zero is accepted
    only for the explicit word "صفر" because text2number also returns 0
    for unrecognized words.  Words that fail conversion (including when
    pyarabic is not installed) are kept verbatim.

    Args:
        sentence: Input text, possibly containing Arabic number words.

    Returns:
        The sentence with recognized number words replaced by digit
        strings; the original sentence unchanged if anything goes wrong.
    """
    try:
        # --- Normalization step: map common variants to canonical spellings ---
        replacements = {
            "اربعة": "أربعة",
            "اربع": "أربع",
            "اثنين": "اثنان",
            "اتنين": "اثنان",  # Egyptian variant
            "ثلاث": "ثلاثة",
            "خمس": "خمسة",
            "ست": "ستة",
            "سبع": "سبعة",
            "ثمان": "ثمانية",
            "تسع": "تسعة",
            "عشر": "عشرة",
        }
        for wrong, correct in replacements.items():
            sentence = re.sub(rf"\b{wrong}\b", correct, sentence)

        # Split on whitespace but capture it, so spacing is reassembled intact.
        words = re.split(r'(\s+)', sentence)
        converted_words = []

        for word in words:
            stripped = word.strip()
            if not stripped:  # whitespace token - keep as-is
                converted_words.append(word)
                continue

            try:
                num = text2number(stripped)
                # Accept valid numbers; 0 only for the explicit zero word,
                # since text2number yields 0 for non-number words too.
                if isinstance(num, int) and (num != 0 or stripped == "صفر"):
                    converted_words.append(str(num))
                else:
                    converted_words.append(word)
            except Exception:
                # Unconvertible word (or pyarabic missing): keep the original.
                converted_words.append(word)

        return ''.join(converted_words)

    except Exception as e:
        logger.warning(f"Error converting Arabic numbers: {e}")
        return sentence
+
+
# Try to install flash-attn if not available.
# NOTE(review): the fallback shells out to pip at import time (and also
# installs websockets, which is unrelated to flash-attn) - presumably a
# one-shot convenience for fresh machines.  The freshly installed package
# is not re-imported in this process, so use_flash_attn stays False for
# the current run either way.
try:
    import flash_attn
    use_flash_attn = True
except ImportError:
    print("Flash attention not available, using standard attention")
    use_flash_attn = False
    try:
        subprocess.run(
            "pip install websockets",
            shell=True,
            check=False
        )
        subprocess.run(
            "pip install flash-attn --no-build-isolation",
            shell=True,
            check=False
        )
    except:
        pass
+
# Inference device/precision: fp16 on GPU, fp32 on CPU.
device = "cuda" if torch.cuda.is_available() else "cpu"
torch_dtype = torch.float16 if torch.cuda.is_available() else torch.float32
# Fine-tuned Arabic Whisper checkpoint (Hugging Face model id).
MODEL_NAME = "alaatiger989/FT_Arabic_Whisper_V1_1"#"openai/whisper-large-v3-turbo"

print(f"Using device: {device}")
print(f"CUDA available: {torch.cuda.is_available()}")
if torch.cuda.is_available():
    print(f"GPU: {torch.cuda.get_device_name(0)}")

# Model initialization with fallback for attention implementation:
# prefer flash-attention-2 when the package imported and a GPU exists;
# on any load error fall back to the standard attention implementation.
try:
    if use_flash_attn and torch.cuda.is_available():
        model = AutoModelForSpeechSeq2Seq.from_pretrained(
            MODEL_NAME,
            torch_dtype=torch_dtype,
            low_cpu_mem_usage=True,
            use_safetensors=True,
            attn_implementation="flash_attention_2"
        )
    else:
        model = AutoModelForSpeechSeq2Seq.from_pretrained(
            MODEL_NAME,
            torch_dtype=torch_dtype,
            low_cpu_mem_usage=True,
            use_safetensors=True
        )
except Exception as e:
    print(f"Error loading model with flash attention: {e}")
    model = AutoModelForSpeechSeq2Seq.from_pretrained(
        MODEL_NAME,
        torch_dtype=torch_dtype,
        low_cpu_mem_usage=True,
        use_safetensors=True
    )

model.to(device)

# Feature extractor + tokenizer for the same checkpoint.
processor = AutoProcessor.from_pretrained(MODEL_NAME)
tokenizer = WhisperTokenizer.from_pretrained(MODEL_NAME)

# Thread pool for running blocking transcription off the asyncio event loop.
executor = ThreadPoolExecutor(max_workers=4)
+
class JambonzAudioBuffer:
    """Thread-safe accumulator for incoming PCM audio.

    Keeps two views of the stream: a pending-audio counter used to decide
    when a chunk's worth of new audio has arrived, and the full session
    audio used for transcription.  Packets are stored in lists and
    concatenated lazily, so per-packet appends are O(1) instead of
    re-copying the whole array on every add (the previous per-packet
    np.concatenate was quadratic over a session).
    """

    def __init__(self, sample_rate=8000, chunk_duration=1.0):
        self.sample_rate = sample_rate
        self.chunk_duration = chunk_duration
        # Number of samples that counts as "one chunk ready".
        self.chunk_samples = int(chunk_duration * sample_rate)

        # Pending-audio packets and running sample count (guarded by lock).
        self._buffer_chunks = []
        self._buffer_len = 0
        # Every packet received this session, in arrival order.
        self._total_chunks = []
        self.lock = threading.Lock()

        # Voice Activity Detection (simple energy-based)
        self.silence_threshold = 0.01
        self.min_speech_samples = int(0.3 * sample_rate)  # 300ms minimum speech

    def add_audio(self, audio_data):
        """Append a packet of float32 samples to both views."""
        with self.lock:
            self._buffer_chunks.append(audio_data)
            self._buffer_len += len(audio_data)
            self._total_chunks.append(audio_data)

    def has_chunk_ready(self):
        """True once at least chunk_samples of audio have been buffered."""
        with self.lock:
            return self._buffer_len >= self.chunk_samples

    def is_speech(self, audio_chunk):
        """Simple VAD: mean absolute energy over a minimum-length window."""
        if len(audio_chunk) < self.min_speech_samples:
            return False
        energy = np.mean(np.abs(audio_chunk))
        return energy > self.silence_threshold

    def get_chunk_for_processing(self):
        """Signal (non-None) that a chunk is ready, without consuming audio.

        Interim transcription re-reads the full session audio each pass, so
        this deliberately returns a dummy marker instead of draining the
        buffer.
        """
        with self.lock:
            if self._buffer_len < self.chunk_samples:
                return None
            return np.array([1])  # dummy "ready" marker; audio is not consumed

    def get_all_audio(self):
        """Return all accumulated audio (a fresh float32 array) for transcription."""
        with self.lock:
            if not self._total_chunks:
                return np.array([], dtype=np.float32)
            return np.concatenate(self._total_chunks).astype(np.float32, copy=False)

    def clear(self):
        """Drop all buffered audio and reset counters."""
        with self.lock:
            self._buffer_chunks = []
            self._buffer_len = 0
            self._total_chunks = []
+
def linear16_to_audio(audio_bytes, sample_rate=8000):
    """Decode LINEAR16 PCM bytes (jambonz wire format) into float32 samples.

    Samples are scaled into [-1.0, 1.0) by dividing by 32768.  Returns an
    empty float32 array if decoding fails.
    """
    try:
        pcm = np.frombuffer(audio_bytes, dtype=np.int16)
        return pcm.astype(np.float32) / 32768.0
    except Exception as e:
        logger.error(f"Error converting LINEAR16 to audio: {e}")
        return np.array([], dtype=np.float32)
+
def resample_audio(audio_data, source_rate, target_rate):
    """Resample audio by linear interpolation (e.g. 8kHz -> 16kHz for Whisper)."""
    if source_rate == target_rate:
        return audio_data

    # Fractional sample positions in the source signal (step = 1/ratio,
    # written exactly as before to keep float behavior identical).
    ratio = target_rate / source_rate
    positions = np.arange(0, len(audio_data), 1/ratio)
    positions = positions[positions < len(audio_data)]
    out = np.interp(positions, np.arange(len(audio_data)), audio_data)

    # Keep a consistent float32 dtype for downstream consumers.
    return out.astype(np.float32)
def transcribe_chunk_direct(audio_data, source_sample_rate=8000, target_sample_rate=16000):
    """Transcribe an 8kHz audio chunk via the model's generate() directly.

    Pipeline: resample to 16kHz -> optional denoise -> Whisper features ->
    greedy decode forced to Arabic transcription.  Returns "" on empty or
    too-short input and on any error.
    """
    try:
        if len(audio_data) == 0:
            return ""

        # Whisper expects 16kHz input.
        audio = resample_audio(audio_data, source_sample_rate, target_sample_rate)

        # --- Denoiser added ---
        audio = denoise_audio(audio, sample_rate=target_sample_rate)

        # Skip fragments shorter than 100ms - too little signal for Whisper.
        if len(audio) < int(0.1 * target_sample_rate):
            return ""

        started = time.time()

        features = processor(
            audio,
            sampling_rate=target_sample_rate,
            return_tensors="pt"
        ).input_features
        features = features.to(device=device, dtype=torch_dtype)

        # Explicit all-ones attention mask avoids generate() warnings.
        mask = torch.ones(
            features.shape[:-1],
            dtype=torch.long,
            device=device
        )

        with torch.no_grad():
            # Greedy decode (beam=1, no sampling), forced Arabic transcription.
            token_ids = model.generate(
                features,
                attention_mask=mask,
                max_new_tokens=128,
                do_sample=False,
                temperature=0.0,
                num_beams=1,
                language="ar",
                task="transcribe",
                pad_token_id=tokenizer.pad_token_id,
                eos_token_id=tokenizer.eos_token_id
            )

        text = tokenizer.batch_decode(
            token_ids,
            skip_special_tokens=True
        )[0].strip()

        logger.info(f"Direct transcription completed in {time.time() - started:.2f}s: '{text}'")
        return text

    except Exception as e:
        logger.error(f"Error during direct transcription: {e}")
        return ""
+# def transcribe_chunk_direct(audio_data, source_sample_rate=8000, target_sample_rate=16000):
+# """Transcribe audio chunk using model's generate method directly"""
+# try:
+# if len(audio_data) == 0:
+# return ""
+
+# # Resample from 8kHz to 16kHz for Whisper
+# resampled_audio = resample_audio(audio_data, source_sample_rate, target_sample_rate)
+
+# # Ensure minimum length for Whisper
+# min_samples = int(0.1 * target_sample_rate) # 100ms minimum
+# if len(resampled_audio) < min_samples:
+# return ""
+
+# start_time = time.time()
+
+# # Prepare input features with proper dtype
+# input_features = processor(
+# resampled_audio,
+# sampling_rate=target_sample_rate,
+# return_tensors="pt"
+# ).input_features
+
+# # Ensure correct dtype and device
+# input_features = input_features.to(device=device, dtype=torch_dtype)
+
+# # Create attention mask to avoid warnings
+# attention_mask = torch.ones(
+# input_features.shape[:-1],
+# dtype=torch.long,
+# device=device
+# )
+
+# # Generate transcription using model directly
+# with torch.no_grad():
+# predicted_ids = model.generate(
+# input_features,
+# attention_mask=attention_mask,
+# max_new_tokens=128,
+# do_sample=False,
+# temperature=0.0,
+# num_beams=1,
+# language="ar",
+# task="transcribe",
+# pad_token_id=tokenizer.pad_token_id,
+# eos_token_id=tokenizer.eos_token_id
+# )
+
+# # Decode the transcription
+# transcription = tokenizer.batch_decode(
+# predicted_ids,
+# skip_special_tokens=True
+# )[0].strip()
+
+# end_time = time.time()
+
+# logger.info(f"Direct transcription completed in {end_time - start_time:.2f}s: '{transcription}'")
+# return transcription
+
+# except Exception as e:
+# logger.error(f"Error during direct transcription: {e}")
+# return ""
+
class JambonzSTTHandler:
    """Per-connection jambonz STT session.

    Lifecycle: start_processing() (on the "start" message) launches two
    background tasks - interim transcription over the accumulated audio,
    and an auto-final monitor that promotes the latest interim text to a
    final result after a silence window.  stop_processing() cancels both
    and emits a final transcription if none was sent yet.
    """

    def __init__(self, websocket):
        self.websocket = websocket
        self.audio_buffer = None          # JambonzAudioBuffer, created on "start"
        self.config = {}                  # session options from the "start" message
        self.running = True               # cleared by stop_processing()
        self.transcription_task = None    # background interim-transcription task
        self.full_transcript = ""
        self.last_partial = ""            # last interim text, used to suppress duplicates

        # Auto-final detection variables
        self.interim_count = 0            # interim results produced so far
        self.last_interim_time = None     # wall-clock time of the last interim
        self.silence_timeout = 1.5 # seconds of silence that trigger an auto-final
        self.min_interim_count = 1 # Minimum interim results before considering final
        self.auto_final_task = None
        self.accumulated_transcript = ""  # best transcript so far (candidate final)
        self.final_sent = False           # guards against duplicate finals

    async def start_processing(self, start_message):
        """Initialize with start message from jambonz and launch worker tasks."""
        self.config = {
            "language": start_message.get("language", "ar-EG"),
            "format": start_message.get("format", "raw"),
            "encoding": start_message.get("encoding", "LINEAR16"),
            "sample_rate": start_message.get("sampleRateHz", 8000),
            "interim_results": start_message.get("interimResults", True),
            "options": start_message.get("options", {})
        }

        logger.info(f"STT session started with config: {self.config}")

        # Initialize audio buffer
        self.audio_buffer = JambonzAudioBuffer(
            sample_rate=self.config["sample_rate"],
            chunk_duration=1.0  # Process every 1 second
        )

        # Reset auto-final detection variables
        self.interim_count = 0
        self.last_interim_time = None
        self.accumulated_transcript = ""
        self.final_sent = False

        # Start background transcription task
        self.transcription_task = asyncio.create_task(self._process_audio_chunks())

        # Start auto-final detection task
        self.auto_final_task = asyncio.create_task(self._monitor_for_auto_final())

    async def stop_processing(self):
        """Stop processing and send final transcription."""
        self.running = False

        # Cancel background tasks and wait for them to unwind.
        if self.transcription_task:
            self.transcription_task.cancel()
            try:
                await self.transcription_task
            except asyncio.CancelledError:
                pass

        if self.auto_final_task:
            self.auto_final_task.cancel()
            try:
                await self.auto_final_task
            except asyncio.CancelledError:
                pass

        # Send final transcription if not already sent.
        # NOTE: send_transcription() sets final_sent, so the comprehensive
        # re-transcription below is skipped whenever this branch fires.
        if not self.final_sent and self.accumulated_transcript.strip():
            await self.send_transcription(self.accumulated_transcript, is_final=True)

        # Also process any remaining audio for comprehensive final transcription
        if self.audio_buffer:
            all_audio = self.audio_buffer.get_all_audio()
            if len(all_audio) > 0 and not self.final_sent:
                # Run the blocking model call off the event loop.
                loop = asyncio.get_event_loop()
                final_transcription = await loop.run_in_executor(
                    executor,
                    transcribe_chunk_direct,
                    all_audio,
                    self.config["sample_rate"]
                )

                if final_transcription.strip():
                    # Send comprehensive final transcription
                    await self.send_transcription(final_transcription, is_final=True)

        logger.info("STT session ended")

    async def add_audio_data(self, audio_bytes):
        """Decode a binary LINEAR16 frame and append it to the buffer."""
        if self.audio_buffer:
            audio_data = linear16_to_audio(audio_bytes, self.config["sample_rate"])
            self.audio_buffer.add_audio(audio_data)

    async def _process_audio_chunks(self):
        """Background task: re-transcribe all accumulated audio roughly once per second.

        Runs only while interim_results is enabled in the session config.
        Each pass transcribes the *entire* session audio, so interim text is
        cumulative rather than per-chunk.
        """
        while self.running and self.config.get("interim_results", False):
            try:
                if self.audio_buffer and self.audio_buffer.has_chunk_ready():
                    chunk_signal = self.audio_buffer.get_chunk_for_processing()
                    if chunk_signal is not None:
                        # Get all accumulated audio so far for complete transcription
                        all_audio = self.audio_buffer.get_all_audio()

                        # Only process when the most recent chunk contains speech.
                        if len(all_audio) > 0 and self.audio_buffer.is_speech(all_audio[-self.audio_buffer.chunk_samples:]):
                            # Run transcription on all accumulated audio
                            loop = asyncio.get_event_loop()
                            transcription = await loop.run_in_executor(
                                executor,
                                transcribe_chunk_direct,
                                all_audio,
                                self.config["sample_rate"]
                            )

                            if transcription.strip() and transcription != self.last_partial:
                                self.last_partial = transcription
                                self.accumulated_transcript = transcription  # Update accumulated transcript
                                self.interim_count += 1
                                self.last_interim_time = time.time()

                                # Send interim result (a no-op in the current
                                # send_transcription, which emits finals only)
                                await self.send_transcription(transcription, is_final=False)

                                logger.info(f"Interim #{self.interim_count}: '{transcription}'")

                # Small delay to prevent excessive processing
                await asyncio.sleep(0.1)

            except Exception as e:
                logger.error(f"Error in chunk processing: {e}")
                await asyncio.sleep(1)

    async def _monitor_for_auto_final(self):
        """Background task: promote the latest interim text to a final after silence.

        Fires once min_interim_count interim results exist and no new interim
        arrived for silence_timeout seconds, then resets the counters for the
        next utterance.  Polls every 0.5s.
        """
        while self.running:
            try:
                current_time = time.time()

                # Check if we should send auto-final transcription
                if (self.interim_count >= self.min_interim_count and
                    self.last_interim_time is not None and
                    (current_time - self.last_interim_time) >= self.silence_timeout and
                    not self.final_sent and
                    self.accumulated_transcript.strip()):

                    logger.info(f"Auto-final triggered: {self.interim_count} interim results, "
                                f"{current_time - self.last_interim_time:.1f}s silence")

                    # Send the accumulated transcript as final
                    await self.send_transcription(self.accumulated_transcript, is_final=True)
                    self.final_sent = True

                    # Reset counters for potential next utterance
                    self.interim_count = 0
                    self.last_interim_time = None
                    self.accumulated_transcript = ""

                # Check every 0.5 seconds
                await asyncio.sleep(0.5)

            except Exception as e:
                logger.error(f"Error in auto-final monitoring: {e}")
                await asyncio.sleep(1)

    # (An earlier send_transcription variant that also emitted interim
    # results was commented out here; the active version below sends
    # final results only.)

    async def send_transcription(self, text, is_final=False, confidence=0.9):
        """Send a FINAL transcription in jambonz format; interim calls are no-ops.

        Arabic number words are converted to digits and logged, but
        NOTE(review): the outgoing "transcript" field currently carries
        original_text, not converted_text - confirm which form downstream
        jambonz consumers expect.
        """
        try:
            if not is_final:
                # Do nothing for interim results
                logger.debug("Skipping interim transcription (not final).")
                return

            # Convert Arabic numbers only for final transcripts
            original_text = text
            converted_text = convert_arabic_numbers_in_sentence(text)

            # Log the conversion if numbers were found and converted
            if original_text != converted_text:
                logger.info(f"Arabic numbers converted: '{original_text}' -> '{converted_text}'")

            message = {
                "type": "transcription",
                "is_final": True,
                "alternatives": [
                    {
                        "transcript": original_text,#converted_text,
                        "confidence": confidence
                    }
                ],
                "language": self.config.get("language", "ar-EG"),
                "channel": 1
            }

            # Send only final messages
            await self.websocket.send(json.dumps(message))
            logger.info(f"Sent FINAL transcription: '{converted_text}'")

            self.final_sent = True

        except Exception as e:
            logger.error(f"Error sending transcription: {e}")

    async def send_error(self, error_message):
        """Send an error message in jambonz format (best-effort; never raises)."""
        try:
            message = {
                "type": "error",
                "error": error_message
            }
            await self.websocket.send(json.dumps(message))
            logger.error(f"Sent error: {error_message}")
        except Exception as e:
            logger.error(f"Error sending error message: {e}")
+
async def handle_jambonz_websocket(websocket):
    """Handle one jambonz WebSocket connection for the full STT lifecycle.

    Text frames are JSON control messages ("start"/"stop"); binary frames
    are raw LINEAR16 PCM audio.  A fresh JambonzSTTHandler is created per
    connection and torn down (emitting a final transcription) on exit.
    """

    client_id = f"jambonz_{id(websocket)}"
    logger.info(f"New jambonz connection: {client_id}")

    handler = JambonzSTTHandler(websocket)

    try:
        async for message in websocket:
            try:
                if isinstance(message, str):
                    # Handle JSON control messages
                    data = json.loads(message)
                    message_type = data.get("type")

                    if message_type == "start":
                        logger.info(f"Received start message: {data}")
                        await handler.start_processing(data)

                    elif message_type == "stop":
                        logger.info("Received stop message")
                        await handler.stop_processing()
                        # Close websocket after final transcription
                        await websocket.close(code=1000, reason="Session completed")
                        break

                    else:
                        logger.warning(f"Unknown message type: {message_type}")
                        await handler.send_error(f"Unknown message type: {message_type}")

                else:
                    # Handle binary audio data (LINEAR16 PCM)
                    if handler.audio_buffer is None:
                        # Audio arrived before "start": drop it but tell the client.
                        await handler.send_error("Received audio before start message")
                        continue

                    await handler.add_audio_data(message)

            except json.JSONDecodeError as e:
                logger.error(f"JSON decode error: {e}")
                await handler.send_error(f"Invalid JSON: {str(e)}")
            except Exception as e:
                logger.error(f"Error processing message: {e}")
                await handler.send_error(f"Processing error: {str(e)}")

    except websockets.exceptions.ConnectionClosed:
        logger.info(f"jambonz connection closed: {client_id}")
    except Exception as e:
        logger.error(f"jambonz WebSocket error: {e}")
        try:
            await handler.send_error(str(e))
        except:
            pass
    finally:
        # Finalize the session even on abnormal disconnects; stop_processing()
        # clears handler.running, so this is skipped after a "stop" message.
        if handler.running:
            await handler.stop_processing()
        logger.info(f"jambonz connection ended: {client_id}")
+
async def main():
    """Bring up the jambonz custom-STT WebSocket server and block until it closes."""
    logger.info("Starting Jambonz Custom STT WebSocket server on port 3006...")

    # One handler coroutine per incoming connection; pings reap dead peers.
    server = await websockets.serve(
        handle_jambonz_websocket,
        "0.0.0.0",
        3006,
        ping_interval=20,
        ping_timeout=10,
        close_timeout=10,
    )

    logger.info("Jambonz Custom STT WebSocket server started on ws://0.0.0.0:3006")
    logger.info("Ready to handle jambonz STT requests")
    # NOTE(review): these banner lines may predate current tuning
    # (JambonzSTTHandler uses silence_timeout=1.5, min_interim_count=1).
    capabilities = [
        "- Expects LINEAR16 PCM audio at 8kHz",
        "- Supports interim results with auto-final detection",
        "- Auto-final: 3+ interim results + 1.3s silence",
        "- Resamples to 16kHz for Whisper processing",
        "- Converts Arabic numbers to digits before sending",
    ]
    for line in capabilities:
        logger.info(line)

    # Block here until the server is shut down.
    await server.wait_closed()
+
if __name__ == "__main__":
    # Startup banner summarizing the server's advertised configuration.
    # NOTE(review): the "1.3s" figure does not match silence_timeout=1.5
    # in JambonzSTTHandler - confirm which value is intended.
    print("=" * 60)
    print("Jambonz Custom STT Server with Whisper + Arabic Numbers")
    print("=" * 60)
    print(f"Model: {MODEL_NAME}")
    print(f"Device: {device}")
    print("WebSocket Port: 3006")
    print("Protocol: jambonz STT API")
    print("Audio Format: LINEAR16 PCM @ 8kHz")
    print("Auto-Final: 2+ speech activities + 1.3s silence")
    print("Arabic Numbers: Converted to digits in FINAL transcriptions only")
    print("Interim Results: DISABLED (final transcription only)")
    if arabic_numbers_available:
        print("✓ pyarabic library available for number conversion")
    else:
        print("✗ pyarabic library not available - install with: pip install pyarabic")
    print("=" * 60)

    # Run the asyncio server; exit quietly on Ctrl-C, report anything else.
    try:
        asyncio.run(main())
    except KeyboardInterrupt:
        print("\nShutting down server...")
    except Exception as e:
        print(f"Server error: {e}")
diff --git a/asr_websocket_client.html b/asr_websocket_client.html
new file mode 100644
index 0000000000000000000000000000000000000000..d45a138e5f3471f314ed4eab245089fb8d1709b5
--- /dev/null
+++ b/asr_websocket_client.html
@@ -0,0 +1,606 @@
+
+
+
+
+
+ ASR WebSocket Testing Client
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
Disconnected
+
+
+
+
+
+
+
+
+
+
ASR Response:
+
Waiting for audio input...
+
+
+
+
+
+
\ No newline at end of file
diff --git a/best_nemo_whisper_jambonz.py b/best_nemo_whisper_jambonz.py
new file mode 100644
index 0000000000000000000000000000000000000000..10abf4a09651741ad74bf2fd23fee2b44ac65969
--- /dev/null
+++ b/best_nemo_whisper_jambonz.py
@@ -0,0 +1,1338 @@
+import torch
+import asyncio
+import websockets
+import json
+import threading
+import numpy as np
+import logging
+import time
+import tempfile
+import os
+import re
+from concurrent.futures import ThreadPoolExecutor
+import subprocess
+import struct
+
+# NeMo imports
+import nemo.collections.asr as nemo_asr
+import soundfile as sf
+
+# Whisper imports
+# from transformers import AutoModelForSpeechSeq2Seq, AutoProcessor, WhisperTokenizer, pipeline
+from transformers import AutoModelForSpeechSeq2Seq, AutoProcessor
+
+
+# Arabic number conversion imports for Whisper
# Optional dependency: pyarabic supplies text2number, used by
# convert_arabic_numbers_whisper() to turn Arabic number words into digits.
try:
    from pyarabic.number import text2number
    arabic_numbers_available = True
    print("✓ pyarabic library available for Whisper number conversion")
except ImportError:
    # Degrade gracefully: Whisper transcriptions then pass through with
    # number words left as-is.
    arabic_numbers_available = False
    print("✗ pyarabic not available - install with: pip install pyarabic")
    print("Arabic numbers will not be converted to digits for Whisper")
+
+# Set up logging
+logging.basicConfig(level=logging.INFO)
+logger = logging.getLogger(__name__)
+
+# ===== NeMo Arabic number mapping =====
# Mapping of Arabic number words (MSA + dialectal spellings + Arabic-Indic
# digits) to ASCII digit strings, applied to NeMo transcriptions.
arabic_numbers_nemo = {
    # Basic digits (including common ASR misrecognitions of "zero").
    # NOTE(review): the two keys ending in a space ("زيره ", "زرو ") only
    # match when a word character follows the space -- confirm intent.
    "سفر": "0", "فيرو": "0", "هيرو": "0", "صفر": "0", "زيرو": "0", "٠": "0",
    "زيو": "0", "زير": "0", "زر": "0", "زروا": "0", "زرا": "0",
    "زيره ": "0", "زرو ": "0",
    "واحد": "1", "واحدة": "1", "١": "1",
    "اتنين": "2", "اثنين": "2", "إثنين": "2", "اثنان": "2", "إثنان": "2", "٢": "2",
    "تلاتة": "3", "ثلاثة": "3", "٣": "3", "تلاته": "3", "ثلاثه": "3", "ثلاثا": "3", "تلاتا": "3",
    "اربعة": "4", "أربعة": "4", "٤": "4", "اربعه": "4", "أربعه": "4", "أربع": "4", "اربع": "4", "اربعا": "4", "أربعا": "4",
    "خمسة": "5", "خمسه": "5", "٥": "5", "خمس": "5", "خمسا": "5",
    "ستة": "6", "سته": "6", "٦": "6", "ست": "6", "ستّا": "6", "ستةً": "6",
    "سبعة": "7", "سبعه": "7", "٧": "7", "سبع": "7", "سبعا": "7",
    "ثمانية": "8", "ثمانيه": "8", "٨": "8", "ثمان": "8", "ثمنية": "8", "ثمنيه": "8", "ثمانيا": "8", "ثمن": "8",
    "تسعة": "9", "تسعه": "9", "٩": "9", "تسع": "9", "تسعا": "9",

    # Teens
    "عشرة": "10", "١٠": "10",
    "حداشر": "11", "احد عشر": "11", "احداشر": "11",
    "اتناشر": "12", "اثنا عشر": "12",
    "تلتاشر": "13", "ثلاثة عشر": "13",
    "اربعتاشر": "14", "أربعة عشر": "14",
    "خمستاشر": "15", "خمسة عشر": "15",
    "ستاشر": "16", "ستة عشر": "16",
    "سبعتاشر": "17", "سبعة عشر": "17",
    "طمنتاشر": "18", "ثمانية عشر": "18",
    "تسعتاشر": "19", "تسعة عشر": "19",

    # Tens
    "عشرين": "20", "٢٠": "20",
    "تلاتين": "30", "ثلاثين": "30", "٣٠": "30",
    "اربعين": "40", "أربعين": "40", "٤٠": "40",
    "خمسين": "50", "٥٠": "50",
    "ستين": "60", "٦٠": "60",
    "سبعين": "70", "٧٠": "70",
    "تمانين": "80", "ثمانين": "80", "٨٠": "80", "تمانون": "80", "ثمانون": "80",
    "تسعين": "90", "٩٠": "90",

    # Hundreds
    "مية": "100", "مائة": "100", "مئة": "100", "١٠٠": "100",
    "ميتين": "200", "مائتين": "200",
    "تلاتمية": "300", "ثلاثمائة": "300",
    "اربعمية": "400", "أربعمائة": "400",
    "خمسمية": "500", "خمسمائة": "500",
    "ستمية": "600", "ستمائة": "600",
    "سبعمية": "700", "سبعمائة": "700",
    "تمانمية": "800", "ثمانمائة": "800",
    "تسعمية": "900", "تسعمائة": "900",

    # Thousands
    "ألف": "1000", "الف": "1000", "١٠٠٠": "1000",
    "ألفين": "2000", "الفين": "2000",
    "تلات تلاف": "3000", "ثلاثة آلاف": "3000",
    "اربعة آلاف": "4000", "أربعة آلاف": "4000",
    "خمسة آلاف": "5000",
    "ستة آلاف": "6000",
    "سبعة آلاف": "7000",
    "تمانية آلاف": "8000", "ثمانية آلاف": "8000",
    "تسعة آلاف": "9000",

    # Large numbers
    "عشرة آلاف": "10000",
    "مية ألف": "100000", "مائة ألف": "100000",
    "مليون": "1000000", "١٠٠٠٠٠٠": "1000000",
    "ملايين": "1000000",
    "مليار": "1000000000", "١٠٠٠٠٠٠٠٠٠": "1000000000"
}

def replace_arabic_numbers_nemo(text: str) -> str:
    """Convert Arabic number words in *text* to ASCII digit strings (NeMo path).

    Keys are tried longest-first so multi-word entries such as
    "ثلاثة عشر" (13) win over their single-word prefixes ("ثلاثة" -> 3);
    iterating in plain dict order would partially replace compounds,
    leaving e.g. "3 عشر" behind.
    """
    for word in sorted(arabic_numbers_nemo, key=len, reverse=True):
        # re.escape is defensive -- keys are plain words today, but it keeps
        # the pattern valid if one ever contains a regex metacharacter.
        text = re.sub(rf"\b{re.escape(word)}\b", arabic_numbers_nemo[word], text)
    return text
+
def convert_arabic_numbers_whisper(sentence: str) -> str:
    """
    Replace Arabic number words in a sentence with digits for Whisper,
    preserving all other words and punctuation.

    Returns the sentence unchanged when pyarabic is unavailable or the input
    is blank. Conversion is word-by-word (whitespace tokens preserved), so
    multi-word compounds are converted per word rather than as one number.
    """
    if not arabic_numbers_available or not sentence.strip():
        return sentence

    try:
        # Normalization step: map dialectal/short spellings onto the
        # canonical forms that pyarabic's text2number recognizes.
        # NOTE(review): "عشر" -> "عشرة" also rewrites the second word of
        # compounds like "احد عشر" -- confirm this is intended.
        replacements = {
            "اربعة": "أربعة", "اربع": "أربع", "اثنين": "اثنان",
            "اتنين": "اثنان", "ثلاث": "ثلاثة", "خمس": "خمسة",
            "ست": "ستة", "سبع": "سبعة", "ثمان": "ثمانية",
            "تسع": "تسعة", "عشر": "عشرة",
        }
        for wrong, correct in replacements.items():
            sentence = re.sub(rf"\b{wrong}\b", correct, sentence)

        # Split by whitespace but keep the whitespace tokens, so the
        # original spacing is reassembled exactly by ''.join below.
        words = re.split(r'(\s+)', sentence)
        converted_words = []

        for word in words:
            stripped = word.strip()
            if not stripped:  # whitespace token: pass through untouched
                converted_words.append(word)
                continue

            try:
                num = text2number(stripped)
                if isinstance(num, int):
                    # text2number yields 0 for non-number words, so only
                    # accept 0 when the word literally is "صفر" (zero).
                    if num != 0 or stripped == "صفر":
                        converted_words.append(str(num))
                    else:
                        converted_words.append(word)
                else:
                    converted_words.append(word)
            except Exception:
                # Not a number word -- keep it unchanged.
                converted_words.append(word)

        return ''.join(converted_words)

    except Exception as e:
        # Best-effort: on any unexpected failure, return the input untouched.
        logger.warning(f"Error converting Arabic numbers: {e}")
        return sentence
+
# Global models -- populated once by initialize_models(); each stays None
# until its load succeeds, and session handlers check for None before use.
asr_model_nemo = None       # NeMo EncDecCTCModel (Arabic FastConformer)
whisper_model = None        # transformers AutoModelForSpeechSeq2Seq
whisper_processor = None    # Whisper feature extractor + tokenizer bundle
whisper_tokenizer = None    # alias of whisper_processor.tokenizer
device = None               # "cuda" or "cpu", chosen at load time
torch_dtype = None          # torch.float16 on GPU, torch.float32 on CPU
+
def initialize_models():
    """Load the NeMo FastConformer and Whisper models into module globals.

    Runs once at import time. On any failure the corresponding global is
    left as None; per-session handlers refuse to start with a missing model.
    """
    global asr_model_nemo, whisper_model, whisper_processor, whisper_tokenizer, device, torch_dtype

    # Device / dtype selection: fp16 on GPU, fp32 on CPU.
    device = "cuda" if torch.cuda.is_available() else "cpu"
    torch_dtype = torch.float16 if torch.cuda.is_available() else torch.float32

    logger.info(f"Using device: {device}")
    logger.info(f"CUDA available: {torch.cuda.is_available()}")

    # ----- NeMo FastConformer (Arabic) -----
    logger.info("Loading NeMo FastConformer Arabic ASR model...")
    model_path = "stt_ar_fastconformer_hybrid_large_pcd_v1.0.nemo"

    if os.path.exists(model_path):
        try:
            asr_model_nemo = nemo_asr.models.EncDecCTCModel.restore_from(model_path)
            asr_model_nemo.eval()  # inference mode (disables dropout etc.)
            logger.info("✓ NeMo FastConformer model loaded successfully")
        except Exception as e:
            logger.error(f"Failed to load NeMo model: {e}")
            asr_model_nemo = None
    else:
        logger.warning(f"NeMo model not found at: {model_path}")
        asr_model_nemo = None

    # ----- Whisper (fine-tuned Arabic checkpoint) -----
    logger.info("Loading Whisper large-v3 model...")
    MODEL_NAME = "alaatiger989/FT_Arabic_Whisper_V1_1"

    try:
        # Prefer flash attention when the package is importable; fall back
        # to the standard attention implementation on any failure.
        try:
            import flash_attn  # noqa: F401 -- presence check only
            whisper_model = AutoModelForSpeechSeq2Seq.from_pretrained(
                MODEL_NAME,
                torch_dtype=torch_dtype,
                low_cpu_mem_usage=True,
                use_safetensors=True,
                attn_implementation="flash_attention_2"
            )
            logger.info("✓ Whisper loaded with flash attention")
        except Exception:
            # Was a bare `except:`; narrowed so SystemExit/KeyboardInterrupt
            # are no longer swallowed here.
            whisper_model = AutoModelForSpeechSeq2Seq.from_pretrained(
                MODEL_NAME,
                torch_dtype=torch_dtype,
                low_cpu_mem_usage=True,
                use_safetensors=True
            )
            logger.info("✓ Whisper loaded with standard attention")

        whisper_model.to(device)
        whisper_processor = AutoProcessor.from_pretrained(MODEL_NAME)

        # Reuse the processor's tokenizer instead of loading a second copy.
        whisper_tokenizer = whisper_processor.tokenizer

        logger.info("✓ Whisper model + tokenizer loaded successfully")

    except Exception as e:
        logger.error(f"Failed to load Whisper model: {e}")
        whisper_model = None
+
# Initialize models on startup (runs at import time and blocks until done).
initialize_models()

# Shared thread pool: blocking model inference runs here, keeping the
# asyncio event loop responsive.
executor = ThreadPoolExecutor(max_workers=4)
+
+# class JambonzAudioBuffer:
+# def __init__(self, sample_rate=8000, chunk_duration=1.0):
+# self.sample_rate = sample_rate
+# self.chunk_duration = chunk_duration
+# self.chunk_samples = int(chunk_duration * sample_rate)
+
+# self.buffer = np.array([], dtype=np.float32)
+# self.lock = threading.Lock()
+# self.total_audio = np.array([], dtype=np.float32)
+
+# # Voice Activity Detection
+# self.silence_threshold = 0.05
+# self.min_speech_samples = int(0.5 * sample_rate)
+
+# def add_audio(self, audio_data):
+# with self.lock:
+# self.buffer = np.concatenate([self.buffer, audio_data])
+# self.total_audio = np.concatenate([self.total_audio, audio_data])
+
+# def has_chunk_ready(self):
+# with self.lock:
+# return len(self.buffer) >= self.chunk_samples
+
+# def is_speech(self, audio_chunk):
+# """Simple VAD based on energy"""
+# if len(audio_chunk) < self.min_speech_samples:
+# return False
+# energy = np.mean(np.abs(audio_chunk))
+# return energy > self.silence_threshold
+
+# def get_chunk_for_processing(self):
+# """Get audio chunk for processing"""
+# with self.lock:
+# if len(self.buffer) < self.chunk_samples:
+# return None
+# return np.array([1]) # Signal that chunk is ready
+
+# def get_all_audio(self):
+# """Get all accumulated audio"""
+# with self.lock:
+# return self.total_audio.copy()
+
+# def clear(self):
+# with self.lock:
+# self.buffer = np.array([], dtype=np.float32)
+# self.total_audio = np.array([], dtype=np.float32)
+
+# def reset_for_new_segment(self):
+# """Reset buffers for new transcription segment"""
+# with self.lock:
+# self.buffer = np.array([], dtype=np.float32)
+# self.total_audio = np.array([], dtype=np.float32)
+
class JambonzAudioBuffer:
    """Thread-safe PCM accumulator with a simple energy-based VAD.

    Two float32 arrays are kept under one lock: ``buffer`` (only used to
    decide when a chunk's worth of new audio has arrived) and
    ``total_audio`` (every sample received since the last reset).
    """

    def __init__(self, sample_rate=8000, chunk_duration=1.0):
        self.sample_rate = sample_rate
        self.chunk_duration = chunk_duration
        self.chunk_samples = int(chunk_duration * sample_rate)

        self.lock = threading.Lock()
        self.buffer = np.array([], dtype=np.float32)
        self.total_audio = np.array([], dtype=np.float32)

        # Voice Activity Detection -- tuned for Whisper: low energy
        # threshold, 300 ms minimum speech length.
        self.silence_threshold = 0.01
        self.min_speech_samples = int(0.3 * sample_rate)

        # Same logging channel as the module-level logger.
        self._log = logging.getLogger(__name__)

    def add_audio(self, audio_data):
        """Append decoded samples to both internal arrays."""
        with self.lock:
            self.buffer = np.concatenate([self.buffer, audio_data])
            self.total_audio = np.concatenate([self.total_audio, audio_data])
            self._log.debug(f"Added {len(audio_data)} audio samples, total: {len(self.total_audio)}")

    def has_chunk_ready(self):
        """Return True once at least chunk_duration of audio is buffered."""
        with self.lock:
            is_ready = len(self.buffer) >= self.chunk_samples
            if is_ready:
                self._log.debug(f"Chunk ready: {len(self.buffer)} >= {self.chunk_samples}")
            return is_ready

    def is_speech(self, audio_chunk):
        """Energy VAD: speech when RMS or peak amplitude clears the threshold."""
        if len(audio_chunk) < self.min_speech_samples:
            self._log.debug(f"Audio too short for VAD: {len(audio_chunk)} < {self.min_speech_samples}")
            return False

        rms = np.sqrt(np.mean(audio_chunk ** 2))
        peak = np.max(np.abs(audio_chunk))
        detected = rms > self.silence_threshold or peak > (self.silence_threshold * 2)

        self._log.debug(f"VAD check - RMS: {rms:.4f}, Peak: {peak:.4f}, "
                        f"Threshold: {self.silence_threshold}, Speech: {detected}")
        return detected

    def get_chunk_for_processing(self):
        """Return a sentinel array when a chunk is ready, else None.

        The buffer is deliberately NOT consumed here -- callers re-read the
        full accumulated audio via get_all_audio().
        """
        with self.lock:
            if len(self.buffer) < self.chunk_samples:
                return None
            self._log.debug(f"Returning processing signal, buffer size: {len(self.buffer)}")
            return np.array([1])  # Signal that chunk is ready

    def get_all_audio(self):
        """Return a copy of every sample received since the last reset."""
        with self.lock:
            snapshot = self.total_audio.copy()
            self._log.debug(f"Returning {len(snapshot)} total audio samples")
            return snapshot

    def clear(self):
        """Discard all buffered audio."""
        with self.lock:
            self.buffer = np.array([], dtype=np.float32)
            self.total_audio = np.array([], dtype=np.float32)
            self._log.debug("Audio buffer cleared")

    def reset_for_new_segment(self):
        """Discard all buffered audio at a segment boundary."""
        with self.lock:
            self.buffer = np.array([], dtype=np.float32)
            self.total_audio = np.array([], dtype=np.float32)
            self._log.debug("Audio buffer reset for new segment")
+
def linear16_to_audio(audio_bytes, sample_rate=8000):
    """Decode LINEAR16 (signed 16-bit little-endian PCM) bytes to float32.

    Output values are scaled into [-1.0, 1.0). ``sample_rate`` is accepted
    for interface symmetry with the other audio helpers but not used here.
    Returns an empty array on any decode failure.
    """
    try:
        samples = np.frombuffer(audio_bytes, dtype=np.int16)
        # Divide by 32768 so the most negative int16 maps exactly to -1.0.
        return samples.astype(np.float32) / 32768.0
    except Exception as e:
        logger.error(f"Error converting LINEAR16 to audio: {e}")
        return np.array([], dtype=np.float32)
+
def resample_audio(audio_data, source_rate, target_rate):
    """Resample *audio_data* from source_rate to target_rate.

    Returns the input unchanged when the rates already match; otherwise
    returns a new float32 array.
    """
    if source_rate == target_rate:
        return audio_data

    # Fast path for the telephony-to-ASR case: duplicate each sample (crude
    # 2x upsampling, no interpolation).
    if (source_rate, target_rate) == (8000, 16000):
        return np.repeat(audio_data, 2).astype(np.float32)

    # General case: linear interpolation onto the new sample grid.
    step = source_rate / target_rate
    sample_positions = np.arange(0, len(audio_data), step)
    sample_positions = sample_positions[sample_positions < len(audio_data)]
    original_positions = np.arange(len(audio_data))
    resampled = np.interp(sample_positions, original_positions, audio_data)

    return resampled.astype(np.float32)
+
def transcribe_with_nemo(audio_data, source_sample_rate=8000, target_sample_rate=16000):
    """Transcribe *audio_data* with the NeMo FastConformer model.

    Resamples to 16 kHz, writes a temporary WAV (NeMo's transcribe API takes
    file paths), runs the model, and converts Arabic number words to digits.
    Returns "" for empty/short audio, a missing model, or any error.
    """
    try:
        if len(audio_data) == 0 or asr_model_nemo is None:
            return ""

        # Resample to 16kHz (NeMo models typically expect 16kHz)
        resampled_audio = resample_audio(audio_data, source_sample_rate, target_sample_rate)

        # Skip very short audio (< 300 ms) -- not enough signal to decode
        min_samples = int(0.3 * target_sample_rate)
        if len(resampled_audio) < min_samples:
            return ""

        start_time = time.time()

        # Save audio to temporary file (NeMo expects file path);
        # delete=False so the path survives the `with` block for transcribe().
        with tempfile.NamedTemporaryFile(delete=False, suffix=".wav") as tmp_file:
            sf.write(tmp_file.name, resampled_audio, target_sample_rate)
            tmp_path = tmp_file.name

        try:
            # Transcribe with NeMo
            result = asr_model_nemo.transcribe([tmp_path])

            if result and len(result) > 0:
                # Handle different NeMo result formats (Hypothesis object,
                # plain str, or something else entirely)
                if hasattr(result[0], 'text'):
                    raw_text = result[0].text
                elif isinstance(result[0], str):
                    raw_text = result[0]
                else:
                    raw_text = str(result[0])

                if not isinstance(raw_text, str):
                    raw_text = str(raw_text)

                if raw_text and raw_text.strip():
                    # Convert Arabic number words to digits for NeMo output
                    cleaned_text = replace_arabic_numbers_nemo(raw_text)
                    end_time = time.time()

                    if cleaned_text.strip():
                        logger.info(f"NeMo transcription: '{cleaned_text}' (processed in {end_time - start_time:.2f}s)")

                    return cleaned_text.strip()

        finally:
            # Clean up temporary file regardless of transcription outcome
            if os.path.exists(tmp_path):
                os.remove(tmp_path)

        return ""

    except Exception as e:
        logger.error(f"Error during NeMo transcription: {e}")
        return ""
+
def transcribe_with_whisper(audio_data, source_sample_rate=8000, target_sample_rate=16000):
    """Run one Whisper generate() pass over *audio_data* and return the text.

    Returns "" for empty/too-short input, a missing model, or any error.

    NOTE(review): generation uses language="english", task="translate", so
    the output is an English translation rather than an Arabic transcript --
    confirm this is intended for this fine-tuned checkpoint.
    """
    try:
        if len(audio_data) == 0 or whisper_model is None:
            return ""

        # Resample from 8kHz to 16kHz for Whisper
        resampled_audio = resample_audio(audio_data, source_sample_rate, target_sample_rate)

        # Ensure minimum length for Whisper
        min_samples = int(0.1 * target_sample_rate)  # 100ms minimum
        if len(resampled_audio) < min_samples:
            return ""

        start_time = time.time()

        # Prepare log-mel input features with proper dtype
        input_features = whisper_processor(
            resampled_audio,
            sampling_rate=target_sample_rate,
            return_tensors="pt"
        ).input_features

        # Ensure correct dtype and device
        input_features = input_features.to(device=device, dtype=torch_dtype)

        # Create attention mask to avoid warnings.
        # NOTE(review): shape[:-1] drops the frame axis, giving a
        # (batch, mel-bins) mask rather than (batch, frames) -- verify this
        # matches what generate() expects for this model.
        attention_mask = torch.ones(
            input_features.shape[:-1],
            dtype=torch.long,
            device=device
        )

        # Generate transcription using the model directly (greedy decoding,
        # single beam, capped at 128 new tokens)
        with torch.no_grad():
            predicted_ids = whisper_model.generate(
                input_features,
                attention_mask=attention_mask,
                max_new_tokens=128,
                do_sample=False,
                # temperature=0.0,
                num_beams=1,
                language="english",
                task="translate",
                pad_token_id=whisper_tokenizer.pad_token_id,
                eos_token_id=whisper_tokenizer.eos_token_id
            )

        # Decode the generated token ids back to text
        transcription = whisper_tokenizer.batch_decode(
            predicted_ids,
            skip_special_tokens=True
        )[0].strip()

        end_time = time.time()

        logger.info(f"Whisper transcription completed in {end_time - start_time:.2f}s: '{transcription}'")
        return transcription

    except Exception as e:
        logger.error(f"Error during Whisper transcription: {e}")
        return ""
+
+class UnifiedSTTHandler:
+ def __init__(self, websocket):
+ self.websocket = websocket
+ self.audio_buffer = None
+ self.config = {}
+ self.running = False
+ self.transcription_task = None
+ self.use_nemo = False # Flag to determine which model to use
+
+ # Auto-final detection variables
+ self.interim_count = 0
+ self.last_interim_time = None
+ self.silence_timeout = 2.9
+ self.min_interim_count = 1
+ self.auto_final_task = None
+ self.accumulated_transcript = ""
+ self.final_sent = False
+ self.segment_number = 0
+ self.last_partial = ""
+
+ # Processing tracking
+ self.processing_count = 0
+
+ # Add this debugging method to your UnifiedSTTHandler class
+
    # NOTE(review): dead code -- this method is shadowed by the later
    # `async def add_audio_data` defined further down in this class body;
    # Python keeps only the last definition of a name in a class body.
    async def add_audio_data(self, audio_bytes):
        """Add audio data to buffer with enhanced debugging.

        (Superseded by the simpler add_audio_data defined later in the class.)
        """
        if self.audio_buffer and self.running:
            audio_data = linear16_to_audio(audio_bytes, self.config["sample_rate"])
            self.audio_buffer.add_audio(audio_data)

            model_name = "NeMo" if self.use_nemo else "Whisper"

            # Debug logging every few audio packets
            if len(audio_data) > 0:
                total_samples = len(self.audio_buffer.get_all_audio())
                total_seconds = total_samples / self.config["sample_rate"]

                # Log once per accumulated second of audio
                if int(total_seconds) != getattr(self, '_last_logged_second', -1):
                    logger.info(f"{model_name} - Accumulated {total_seconds:.1f}s of audio ({total_samples} samples)")
                    self._last_logged_second = int(total_seconds)

                # Check if we should have chunks ready
                chunk_ready = self.audio_buffer.has_chunk_ready()
                logger.info(f"{model_name} - Chunk ready: {chunk_ready}")
+ # async def start_processing(self, start_message):
+ # """Initialize with start message from jambonz"""
+ # self.config = {
+ # "language": start_message.get("language", "ar-EG"),
+ # "format": start_message.get("format", "raw"),
+ # "encoding": start_message.get("encoding", "LINEAR16"),
+ # "sample_rate": start_message.get("sampleRateHz", 8000),
+ # "interim_results": True, # Always enable for internal processing
+ # "options": start_message.get("options", {})
+ # }
+
+ # # Determine which model to use based on language parameter
+ # language = self.config["language"]
+ # if language == "ar-EG":
+ # logger.info("nemooooooooooooooooooooooooooo")
+ # self.use_nemo = True
+ # model_name = "NeMo FastConformer"
+ # elif language == "ar-EG-whis":
+ # logger.info("whisperrrrrrrrrrrrrrrrrrrrrrrrrrrrr")
+ # self.use_nemo = False
+ # model_name = "Whisper large-v3"
+ # else:
+ # # Default to NeMo for any other Arabic variant
+ # self.use_nemo = True
+ # model_name = "NeMo FastConformer (default)"
+
+ # logger.info(f"STT session started with {model_name} for language: {language}")
+ # logger.info(f"Config: {self.config}")
+
+ # # Check if selected model is available
+ # if self.use_nemo and asr_model_nemo is None:
+ # await self.send_error("NeMo model not available")
+ # return
+ # elif not self.use_nemo and whisper_model is None:
+ # await self.send_error("Whisper model not available")
+ # return
+
+ # # Initialize audio buffer
+ # self.audio_buffer = JambonzAudioBuffer(
+ # sample_rate=self.config["sample_rate"],
+ # chunk_duration=1.0 # 1 second chunks
+ # )
+
+ # # Reset session variables
+ # self.running = True
+ # self.interim_count = 0
+ # self.last_interim_time = None
+ # self.accumulated_transcript = ""
+ # self.final_sent = False
+ # self.segment_number = 0
+ # self.processing_count = 0
+ # self.last_partial = ""
+
+ # # Start background transcription task
+ # self.transcription_task = asyncio.create_task(self._process_audio_chunks())
+
+ # # Start auto-final detection task
+ # self.auto_final_task = asyncio.create_task(self._monitor_for_auto_final())
+
+ # Replace these methods in your UnifiedSTTHandler class
+
    async def start_processing(self, start_message):
        """Start an STT session from a jambonz `start` message.

        Chooses NeMo vs Whisper from the `language` field ("ar-EG" -> NeMo,
        "ar-EG-whis" -> Whisper, anything else -> NeMo), builds the audio
        buffer, resets per-segment state, and launches the background
        transcription and auto-final tasks.
        """
        self.config = {
            "language": start_message.get("language", "ar-EG"),
            "format": start_message.get("format", "raw"),
            "encoding": start_message.get("encoding", "LINEAR16"),
            "sample_rate": start_message.get("sampleRateHz", 8000),
            "interim_results": True,  # Always enable for internal processing
            "options": start_message.get("options", {})
        }

        # Determine which model to use based on language parameter
        language = self.config["language"]
        if language == "ar-EG":
            logger.info("Selected NeMo FastConformer")
            self.use_nemo = True
            model_name = "NeMo FastConformer"
        elif language == "ar-EG-whis":
            logger.info("Selected Whisper large-v3")
            self.use_nemo = False
            model_name = "Whisper large-v3"
        else:
            # Default to NeMo for any other Arabic variant
            self.use_nemo = True
            model_name = "NeMo FastConformer (default)"

        logger.info(f"STT session started with {model_name} for language: {language}")
        logger.info(f"Config: {self.config}")

        # Refuse to start if the selected model failed to load at startup
        if self.use_nemo and asr_model_nemo is None:
            await self.send_error("NeMo model not available")
            return
        elif not self.use_nemo and whisper_model is None:
            await self.send_error("Whisper model not available")
            return

        # Initialize audio buffer with model-specific chunk sizing
        if self.use_nemo:
            chunk_duration = 1.0  # NeMo processes every 1 second
        else:
            chunk_duration = 2.0  # Whisper processes every 2 seconds for better accuracy

        self.audio_buffer = JambonzAudioBuffer(
            sample_rate=self.config["sample_rate"],
            chunk_duration=chunk_duration
        )

        # Adjust VAD threshold for Whisper
        if not self.use_nemo:
            self.audio_buffer.silence_threshold = 0.005  # Lower threshold for Whisper

        # Reset per-session state
        self.running = True
        self.interim_count = 0
        self.last_interim_time = None
        self.accumulated_transcript = ""
        self.final_sent = False
        self.segment_number = 0
        self.processing_count = 0
        self.last_partial = ""

        # Start background transcription task
        self.transcription_task = asyncio.create_task(self._process_audio_chunks())

        # Start auto-final detection task
        self.auto_final_task = asyncio.create_task(self._monitor_for_auto_final())

        logger.info(f"Background tasks started for {model_name}")
+
+
+
    async def stop_processing(self):
        """Stop the session: cancel background tasks, flush a final result,
        and clear the audio buffer.

        NOTE(review): this assumes send_transcription(..., is_final=True)
        sets self.final_sent; otherwise the remaining-audio branch below can
        emit a second final transcription -- confirm against its definition.
        """
        logger.info("Stopping STT session...")
        self.running = False

        # Cancel background tasks and wait for them to acknowledge
        for task in [self.transcription_task, self.auto_final_task]:
            if task:
                task.cancel()
                try:
                    await task
                except asyncio.CancelledError:
                    pass

        # Send final transcription if not already sent
        if not self.final_sent and self.accumulated_transcript.strip():
            await self.send_transcription(self.accumulated_transcript, is_final=True)

        # Process any remaining audio for a comprehensive final transcription
        if self.audio_buffer:
            all_audio = self.audio_buffer.get_all_audio()
            if len(all_audio) > 0 and not self.final_sent:
                loop = asyncio.get_event_loop()

                # Run the (blocking) model inference on the shared thread pool
                if self.use_nemo:
                    final_transcription = await loop.run_in_executor(
                        executor, transcribe_with_nemo, all_audio, self.config["sample_rate"]
                    )
                else:
                    final_transcription = await loop.run_in_executor(
                        executor, transcribe_with_whisper, all_audio, self.config["sample_rate"]
                    )

                if final_transcription.strip():
                    await self.send_transcription(final_transcription, is_final=True)

        # Clear audio buffer
        if self.audio_buffer:
            self.audio_buffer.clear()

        logger.info("STT session stopped")
+
+ async def start_new_segment(self):
+ """Start a new transcription segment"""
+ self.segment_number += 1
+ self.interim_count = 0
+ self.last_interim_time = None
+ self.accumulated_transcript = ""
+ self.final_sent = False
+ self.last_partial = ""
+ self.processing_count = 0
+
+ if self.audio_buffer:
+ self.audio_buffer.reset_for_new_segment()
+
+ logger.info(f"Started new transcription segment #{self.segment_number}")
+
+ async def add_audio_data(self, audio_bytes):
+ """Add audio data to buffer"""
+ if self.audio_buffer and self.running:
+ audio_data = linear16_to_audio(audio_bytes, self.config["sample_rate"])
+ self.audio_buffer.add_audio(audio_data)
+
+ # async def _process_audio_chunks(self):
+ # """Process audio chunks for interim results"""
+ # while self.running:
+ # try:
+ # if self.audio_buffer and self.audio_buffer.has_chunk_ready():
+ # chunk_signal = self.audio_buffer.get_chunk_for_processing()
+ # if chunk_signal is not None:
+ # all_audio = self.audio_buffer.get_all_audio()
+
+ # if len(all_audio) > 0 and self.audio_buffer.is_speech(all_audio[-self.audio_buffer.chunk_samples:]):
+ # loop = asyncio.get_event_loop()
+
+ # # Choose transcription method based on model selection
+ # if self.use_nemo:
+ # transcription = await loop.run_in_executor(
+ # executor, transcribe_with_nemo, all_audio, self.config["sample_rate"]
+ # )
+ # else:
+ # transcription = await loop.run_in_executor(
+ # executor, transcribe_with_whisper, all_audio, self.config["sample_rate"]
+ # )
+
+ # if transcription.strip():
+ # self.processing_count += 1
+ # self.accumulated_transcript = transcription
+
+ # if transcription != self.last_partial or self.interim_count == 0:
+ # self.last_partial = transcription
+ # self.interim_count += 1
+ # self.last_interim_time = time.time()
+ # logger.info(f"Updated interim_count to {self.interim_count} for transcript: '{transcription}'")
+ # else:
+ # self.last_interim_time = time.time()
+
+ # await asyncio.sleep(0.1) # Check every 100ms
+
+ # except Exception as e:
+ # logger.error(f"Error in chunk processing: {e}")
+ # await asyncio.sleep(0.1)
+
+
+ # async def _monitor_for_auto_final(self):
+ # """Monitor for auto-final conditions"""
+ # while self.running:
+ # try:
+ # current_time = time.time()
+
+ # if (self.interim_count >= self.min_interim_count and
+ # self.last_interim_time is not None and
+ # (current_time - self.last_interim_time) >= self.silence_timeout and
+ # not self.final_sent and
+ # self.accumulated_transcript.strip()):
+
+ # logger.info(f"Auto-final triggered for segment #{self.segment_number}")
+
+ # await self.send_transcription(self.accumulated_transcript, is_final=True)
+ # await self.start_new_segment()
+
+ # await asyncio.sleep(0.5) # Check every 500ms
+
+ # except Exception as e:
+ # logger.error(f"Error in auto-final monitoring: {e}")
+ # await asyncio.sleep(0.5)
+
+ # async def _process_audio_chunks(self):
+ # """Process audio chunks for interim results - FIXED for Whisper streaming"""
+ # logger.info(f"Starting audio chunk processing for {'NeMo' if self.use_nemo else 'Whisper'}")
+
+ # while self.running:
+ # try:
+ # if self.audio_buffer and self.audio_buffer.has_chunk_ready():
+ # chunk_signal = self.audio_buffer.get_chunk_for_processing()
+ # if chunk_signal is not None:
+ # all_audio = self.audio_buffer.get_all_audio()
+
+ # # Check if we have enough audio and speech activity
+ # if len(all_audio) > 0:
+ # # Get the latest chunk for VAD check
+ # latest_chunk_start = max(0, len(all_audio) - self.audio_buffer.chunk_samples)
+ # latest_chunk = all_audio[latest_chunk_start:]
+
+ # # For debugging
+ # logger.debug(f"Audio buffer size: {len(all_audio)} samples, Latest chunk: {len(latest_chunk)} samples")
+
+ # if self.audio_buffer.is_speech(latest_chunk):
+ # logger.info(f"Speech detected, processing with {'NeMo' if self.use_nemo else 'Whisper'}")
+
+ # loop = asyncio.get_event_loop()
+
+ # # Choose transcription method based on model selection
+ # if self.use_nemo:
+ # transcription = await loop.run_in_executor(
+ # executor, transcribe_with_nemo, all_audio, self.config["sample_rate"]
+ # )
+ # else:
+ # # For Whisper, ensure we process the accumulated audio
+ # transcription = await loop.run_in_executor(
+ # executor, transcribe_with_whisper, all_audio, self.config["sample_rate"]
+ # )
+
+ # logger.info(f"Transcription result: '{transcription}'")
+
+ # if transcription.strip():
+ # self.processing_count += 1
+ # self.accumulated_transcript = transcription
+
+ # if transcription != self.last_partial or self.interim_count == 0:
+ # self.last_partial = transcription
+ # self.interim_count += 1
+ # self.last_interim_time = time.time()
+ # logger.info(f"Updated interim_count to {self.interim_count} for transcript: '{transcription}'")
+ # else:
+ # self.last_interim_time = time.time()
+ # logger.info("Same transcription, updating time only")
+ # else:
+ # logger.debug("No speech detected in latest chunk")
+
+ # await asyncio.sleep(0.1) # Check every 100ms
+
+ # except Exception as e:
+ # logger.error(f"Error in chunk processing: {e}")
+ # import traceback
+ # traceback.print_exc()
+ # await asyncio.sleep(0.1)
+
+ # async def _monitor_for_auto_final(self):
+ # """Monitor for auto-final conditions - Enhanced logging"""
+ # logger.info("Starting auto-final monitoring")
+
+ # while self.running:
+ # try:
+ # current_time = time.time()
+
+ # if (self.interim_count >= self.min_interim_count and
+ # self.last_interim_time is not None and
+ # (current_time - self.last_interim_time) >= self.silence_timeout and
+ # not self.final_sent and
+ # self.accumulated_transcript.strip()):
+
+ # silence_duration = current_time - self.last_interim_time
+ # logger.info(f"Auto-final triggered for segment #{self.segment_number} - "
+ # f"Interim count: {self.interim_count}, Silence: {silence_duration:.1f}s")
+
+ # await self.send_transcription(self.accumulated_transcript, is_final=True)
+ # await self.start_new_segment()
+
+ # # Debug logging every 5 seconds
+ # if int(current_time) % 5 == 0:
+ # logger.debug(f"Auto-final status - Interim count: {self.interim_count}, "
+ # f"Last interim: {self.last_interim_time}, "
+ # f"Final sent: {self.final_sent}, "
+ # f"Transcript: '{self.accumulated_transcript[:50]}...'")
+
+ # await asyncio.sleep(0.5) # Check every 500ms
+
+ # except Exception as e:
+ # logger.error(f"Error in auto-final monitoring: {e}")
+ # await asyncio.sleep(0.5)
+
+ # async def _process_audio_chunks(self):
+ # """Process audio chunks for interim results - FIXED for both models"""
+ # model_name = "NeMo" if self.use_nemo else "Whisper"
+ # logger.info(f"Starting audio chunk processing for {model_name}")
+
+ # while self.running:
+ # try:
+ # if self.audio_buffer and self.audio_buffer.has_chunk_ready():
+ # chunk_signal = self.audio_buffer.get_chunk_for_processing()
+ # if chunk_signal is not None:
+ # all_audio = self.audio_buffer.get_all_audio()
+
+ # # Debug logging
+ # logger.debug(f"Processing chunk - Total audio: {len(all_audio)} samples")
+
+ # if len(all_audio) > 0:
+ # # Get the latest chunk for VAD check
+ # latest_chunk_start = max(0, len(all_audio) - self.audio_buffer.chunk_samples)
+ # latest_chunk = all_audio[latest_chunk_start:]
+
+ # # Check for speech activity
+ # has_speech = self.audio_buffer.is_speech(latest_chunk)
+ # logger.debug(f"Speech detection result: {has_speech}")
+
+ # if has_speech:
+ # logger.info(f"Processing audio with {model_name} - {len(all_audio)} samples")
+
+ # loop = asyncio.get_event_loop()
+ # start_time = time.time()
+
+ # try:
+ # # Choose transcription method based on model selection
+ # if self.use_nemo:
+ # transcription = await loop.run_in_executor(
+ # executor, transcribe_with_nemo, all_audio, self.config["sample_rate"]
+ # )
+ # else:
+ # # For Whisper, ensure we have enough audio
+ # if len(all_audio) >= int(0.5 * 16000): # At least 0.5 seconds at 16kHz
+ # transcription = await loop.run_in_executor(
+ # executor, transcribe_with_whisper, all_audio, self.config["sample_rate"]
+ # )
+ # else:
+ # transcription = ""
+ # logger.debug("Whisper: Not enough audio for transcription")
+
+ # process_time = time.time() - start_time
+ # logger.info(f"{model_name} processing took {process_time:.2f}s, result: '{transcription}'")
+
+ # if transcription and transcription.strip():
+ # self.processing_count += 1
+ # self.accumulated_transcript = transcription
+
+ # if transcription != self.last_partial or self.interim_count == 0:
+ # self.last_partial = transcription
+ # self.interim_count += 1
+ # self.last_interim_time = time.time()
+ # logger.info(f"Updated interim_count to {self.interim_count} for transcript: '{transcription}'")
+ # else:
+ # self.last_interim_time = time.time()
+ # logger.debug("Same transcription, updating time only")
+ # else:
+ # logger.debug(f"{model_name} returned empty transcription")
+
+ # except Exception as e:
+ # logger.error(f"Error in {model_name} transcription: {e}")
+ # else:
+ # logger.debug("No speech detected in latest chunk")
+
+ # # Different sleep intervals for different models
+ # sleep_interval = 0.1 if self.use_nemo else 0.2 # Whisper can be less frequent
+ # await asyncio.sleep(sleep_interval)
+
+ # except Exception as e:
+ # logger.error(f"Error in chunk processing: {e}")
+ # import traceback
+ # traceback.print_exc()
+ # await asyncio.sleep(1) # Longer sleep on error
+
+ # Also add this to the beginning of _process_audio_chunks method:
+
    async def _process_audio_chunks(self):
        """Background task: poll the audio buffer and produce interim transcripts.

        Loops while ``self.running``. Whenever a full chunk of audio is
        pending, the *entire* accumulated segment audio is re-transcribed
        (NeMo or Whisper, per ``self.use_nemo``) on the shared thread pool,
        and the interim counters consumed by ``_monitor_for_auto_final``
        (``interim_count``, ``last_interim_time``, ``accumulated_transcript``)
        are updated.
        """
        model_name = "NeMo" if self.use_nemo else "Whisper"
        logger.info(f"Starting audio chunk processing for {model_name}")

        chunk_count = 0  # chunks seen this session; used only for logging

        while self.running:
            try:
                if self.audio_buffer and self.audio_buffer.has_chunk_ready():
                    chunk_count += 1
                    logger.info(f"{model_name} - Processing chunk #{chunk_count}")

                    chunk_signal = self.audio_buffer.get_chunk_for_processing()
                    if chunk_signal is not None:
                        # Transcription always runs over the whole accumulated
                        # segment, not just the newest chunk.
                        all_audio = self.audio_buffer.get_all_audio()

                        logger.info(f"{model_name} - Got {len(all_audio)} samples for processing")

                        if len(all_audio) > 0:
                            # Get the latest chunk for VAD check
                            latest_chunk_start = max(0, len(all_audio) - self.audio_buffer.chunk_samples)
                            latest_chunk = all_audio[latest_chunk_start:]

                            # Check for speech activity
                            has_speech = self.audio_buffer.is_speech(latest_chunk)
                            logger.info(f"{model_name} - Speech detected: {has_speech}")

                            if has_speech:
                                logger.info(f"{model_name} - Starting transcription...")

                                # Run blocking model inference off the event loop.
                                loop = asyncio.get_event_loop()
                                start_time = time.time()

                                try:
                                    # Choose transcription method based on model selection
                                    if self.use_nemo:
                                        transcription = await loop.run_in_executor(
                                            executor, transcribe_with_nemo, all_audio, self.config["sample_rate"]
                                        )
                                    else:
                                        transcription = await loop.run_in_executor(
                                            executor, transcribe_with_whisper, all_audio, self.config["sample_rate"]
                                        )

                                    process_time = time.time() - start_time
                                    logger.info(f"{model_name} - Transcription completed in {process_time:.2f}s: '{transcription}'")

                                    if transcription and transcription.strip():
                                        self.processing_count += 1
                                        # Latest full-segment result replaces the previous one.
                                        self.accumulated_transcript = transcription

                                        if transcription != self.last_partial or self.interim_count == 0:
                                            self.last_partial = transcription
                                            self.interim_count += 1
                                            self.last_interim_time = time.time()
                                            logger.info(f"{model_name} - Updated interim_count to {self.interim_count}")
                                        else:
                                            # Unchanged text still counts as activity: refresh the
                                            # timestamp so the auto-final silence window restarts.
                                            self.last_interim_time = time.time()
                                            logger.info(f"{model_name} - Same transcription, updating time only")
                                    else:
                                        logger.info(f"{model_name} - No transcription result")

                                except Exception as e:
                                    logger.error(f"{model_name} - Transcription error: {e}")
                                    import traceback
                                    traceback.print_exc()
                            else:
                                logger.debug(f"{model_name} - No speech in chunk")
                        else:
                            # NOTE(review): this branch runs when all_audio is empty —
                            # the log text is misleading (chunk_signal was NOT None here).
                            logger.warning(f"{model_name} - Chunk signal was None")
                else:
                    # Log why chunk is not ready
                    if self.audio_buffer:
                        current_size = len(self.audio_buffer.buffer)
                        required_size = self.audio_buffer.chunk_samples
                        if current_size > 0:
                            logger.debug(f"{model_name} - Buffer: {current_size}/{required_size} samples")

                await asyncio.sleep(0.1)

            except Exception as e:
                logger.error(f"{model_name} - Error in chunk processing: {e}")
                import traceback
                traceback.print_exc()
                await asyncio.sleep(1)
+
+ async def _monitor_for_auto_final(self):
+ """Monitor for auto-final conditions with model-specific timeouts"""
+ model_name = "NeMo" if self.use_nemo else "Whisper"
+ timeout = 2.0 if self.use_nemo else 3.0 # Longer timeout for Whisper
+
+ logger.info(f"Starting auto-final monitoring for {model_name} (timeout: {timeout}s)")
+
+ while self.running:
+ try:
+ current_time = time.time()
+
+ if (self.interim_count >= self.min_interim_count and
+ self.last_interim_time is not None and
+ (current_time - self.last_interim_time) >= timeout and
+ not self.final_sent and
+ self.accumulated_transcript.strip()):
+
+ silence_duration = current_time - self.last_interim_time
+ logger.info(f"Auto-final triggered for segment #{self.segment_number} ({model_name}) - "
+ f"Interim count: {self.interim_count}, Silence: {silence_duration:.1f}s")
+
+ await self.send_transcription(self.accumulated_transcript, is_final=True)
+ await self.start_new_segment()
+
+ await asyncio.sleep(0.5) # Check every 500ms
+
+ except Exception as e:
+ logger.error(f"Error in auto-final monitoring: {e}")
+ await asyncio.sleep(0.5)
+
+
+
+ async def send_transcription(self, text, is_final=True, confidence=0.9):
+ """Send transcription in jambonz format"""
+ try:
+ # Apply number conversion only for Whisper
+ if not self.use_nemo and is_final:
+ original_text = text
+ converted_text = convert_arabic_numbers_whisper(text)
+
+ if original_text != converted_text:
+ logger.info(f"Whisper - Arabic numbers converted: '{original_text}' -> '{converted_text}'")
+ text = converted_text
+
+ message = {
+ "type": "transcription",
+ "is_final": True, # Always send as final
+ "alternatives": [
+ {
+ "transcript": text,
+ "confidence": confidence
+ }
+ ],
+ "language": self.config.get("language", "ar-EG"),
+ "channel": 1
+ }
+
+ await self.websocket.send(json.dumps(message))
+ self.final_sent = True
+
+ model_name = "NeMo" if self.use_nemo else "Whisper"
+ logger.info(f"Sent FINAL transcription ({model_name}): '{text}'")
+
+ except Exception as e:
+ logger.error(f"Error sending transcription: {e}")
+
+ async def send_error(self, error_message):
+ """Send error message in jambonz format"""
+ try:
+ message = {
+ "type": "error",
+ "error": error_message
+ }
+ await self.websocket.send(json.dumps(message))
+ logger.error(f"Sent error: {error_message}")
+ except Exception as e:
+ logger.error(f"Error sending error message: {e}")
+
async def handle_jambonz_websocket(websocket):
    """Handle jambonz WebSocket connections.

    Protocol: text frames carry JSON control messages ("start" configures a
    session, "stop" closes it); binary frames carry raw LINEAR16 audio that
    is forwarded to the per-connection UnifiedSTTHandler.
    """

    client_id = f"jambonz_{id(websocket)}"
    logger.info(f"New unified STT connection: {client_id}")

    # One handler instance per connection; holds all session state.
    handler = UnifiedSTTHandler(websocket)

    try:
        async for message in websocket:
            try:
                if isinstance(message, str):
                    # Control channel: JSON messages.
                    data = json.loads(message)
                    message_type = data.get("type")

                    if message_type == "start":
                        logger.info(f"Received start message: {data}")
                        await handler.start_processing(data)

                    elif message_type == "stop":
                        logger.info("Received stop message - closing WebSocket")
                        await handler.stop_processing()
                        await websocket.close(code=1000, reason="Session stopped by client")
                        break

                    else:
                        logger.warning(f"Unknown message type: {message_type}")
                        await handler.send_error(f"Unknown message type: {message_type}")

                else:
                    # Handle binary audio data
                    if not handler.running or handler.audio_buffer is None:
                        # Audio outside an active session is rejected but does
                        # not tear down the connection.
                        logger.warning("Received audio data outside of active session")
                        await handler.send_error("Received audio before start message or after stop")
                        continue

                    await handler.add_audio_data(message)

            except json.JSONDecodeError as e:
                logger.error(f"JSON decode error: {e}")
                await handler.send_error(f"Invalid JSON: {str(e)}")
            except Exception as e:
                logger.error(f"Error processing message: {e}")
                await handler.send_error(f"Processing error: {str(e)}")

    except websockets.exceptions.ConnectionClosed:
        logger.info(f"Unified STT connection closed: {client_id}")
    except Exception as e:
        logger.error(f"Unified STT WebSocket error: {e}")
        try:
            await handler.send_error(str(e))
        # NOTE(review): bare except — prefer `except Exception:`; as written it
        # also swallows CancelledError/KeyboardInterrupt raised mid-send.
        except:
            pass
    finally:
        # Best-effort cleanup regardless of how the loop exited.
        if handler.running:
            await handler.stop_processing()
        logger.info(f"Unified STT connection ended: {client_id}")
+
async def main():
    """Boot the unified Arabic STT WebSocket server and block until shutdown."""
    logger.info("Starting Unified Arabic STT WebSocket server on port 3007...")

    # Determine which of the two backends actually loaded at import time.
    models_available = [
        label
        for model, label in (
            (asr_model_nemo, "NeMo FastConformer (ar-EG)"),
            (whisper_model, "Whisper large-v3 (ar-EG-whis)"),
        )
        if model is not None
    ]

    if not models_available:
        logger.error("No models available! Please check model paths and installations.")
        return

    # Start WebSocket server
    ws_server = await websockets.serve(
        handle_jambonz_websocket,
        "0.0.0.0",
        3007,
        ping_interval=20,
        ping_timeout=10,
        close_timeout=10,
    )

    logger.info("Unified Arabic STT WebSocket server started on ws://0.0.0.0:3007")
    logger.info("Ready to handle jambonz STT requests with both models")
    logger.info("ROUTING:")
    logger.info("- language: 'ar-EG' → NeMo FastConformer (with built-in number conversion)")
    logger.info("- language: 'ar-EG-whis' → Whisper large-v3 (with pyarabic number conversion)")
    logger.info("FEATURES:")
    logger.info("- Continuous transcription with segmentation")
    logger.info("- Voice Activity Detection")
    logger.info("- Auto-final detection (2s silence timeout)")
    logger.info("- Model-specific number conversion")
    logger.info(f"AVAILABLE MODELS: {', '.join(models_available)}")

    # Block until the server socket is closed.
    await ws_server.wait_closed()
+
if __name__ == "__main__":
    # Startup banner: describes the protocol, routing and feature set before
    # the event loop takes over.
    print("=" * 80)
    print("Unified Arabic STT Server (NeMo + Whisper)")
    print("=" * 80)
    print("WebSocket Port: 3007")
    print("Protocol: jambonz STT API")
    print("Audio Format: LINEAR16 PCM @ 8kHz → 16kHz")
    print()
    print("LANGUAGE ROUTING:")
    print("- 'ar-EG' → NeMo FastConformer")
    print("  • Built-in Arabic number word to digit conversion")
    print("  • Optimized for Arabic dialects")
    print("- 'ar-EG-whis' → Whisper large-v3")
    print("  • pyarabic library number conversion (final transcripts only)")
    print("  • OpenAI Whisper model")
    print()
    print("FEATURES:")
    print("- Automatic model selection based on language parameter")
    print("- Voice Activity Detection")
    print("- Auto-final detection (2 seconds silence)")
    print("- Model-specific number conversion strategies")
    print("- Continuous transcription with segmentation")
    print()

    # Check model availability for startup info
    # (the models were loaded as a module-import side effect).
    nemo_status = "✓ Available" if asr_model_nemo is not None else "✗ Not Available"
    whisper_status = "✓ Available" if whisper_model is not None else "✗ Not Available"
    arabic_numbers_status = "✓ Available" if arabic_numbers_available else "✗ Not Available (install pyarabic)"

    print("MODEL STATUS:")
    print(f"- NeMo FastConformer: {nemo_status}")
    print(f"- Whisper large-v3: {whisper_status}")
    print(f"- pyarabic (Whisper numbers): {arabic_numbers_status}")
    print("=" * 80)

    # Run the async server; Ctrl-C exits cleanly, anything else is reported.
    try:
        asyncio.run(main())
    except KeyboardInterrupt:
        print("\nShutting down unified server...")
    except Exception as e:
        print(f"Server error: {e}")
\ No newline at end of file
diff --git a/best_nemo_whisper_jambonz_denoiser.py b/best_nemo_whisper_jambonz_denoiser.py
new file mode 100644
index 0000000000000000000000000000000000000000..3746263fbc369db0448e90f2b15e68a62c4fffb3
--- /dev/null
+++ b/best_nemo_whisper_jambonz_denoiser.py
@@ -0,0 +1,1357 @@
+import torch
+import asyncio
+import websockets
+import json
+import threading
+import numpy as np
+import logging
+import time
+import tempfile
+import os
+import re
+from concurrent.futures import ThreadPoolExecutor
+import subprocess
+import struct
+
+# NeMo imports
+import nemo.collections.asr as nemo_asr
+import soundfile as sf
+
+# Whisper imports
+# from transformers import AutoModelForSpeechSeq2Seq, AutoProcessor, WhisperTokenizer, pipeline
+from transformers import AutoModelForSpeechSeq2Seq, AutoProcessor
+
+
# Arabic number conversion imports for Whisper
# Optional dependency: pyarabic supplies text2number for the Whisper path.
# When absent, convert_arabic_numbers_whisper() returns its input unchanged.
try:
    from pyarabic.number import text2number
    arabic_numbers_available = True
    print("✓ pyarabic library available for Whisper number conversion")
except ImportError:
    arabic_numbers_available = False
    print("✗ pyarabic not available - install with: pip install pyarabic")
    print("Arabic numbers will not be converted to digits for Whisper")

# Set up logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
+
# ===== NeMo Arabic number mapping =====
# Maps Arabic number words (MSA + dialectal spellings, plus Arabic-Indic
# numerals) to ASCII digit strings. Multi-word phrases are included and are
# matched before their components (see replace_arabic_numbers_nemo).
# NOTE(review): "زير" appears twice (harmless — later literal wins) and two
# keys carry a trailing space ("زيره ", "زرو ") that makes them match only
# when followed by another word; confirm whether that is intentional.
arabic_numbers_nemo = {
    # Basic digits
    "سفر": "0", "فيرو": "0", "هيرو": "0","صفر": "0", "زيرو": "0", "٠": "0","زيو": "0","زير": "0","زير": "0","زر": "0","زروا": "0","زرا": "0","زيره ": "0","زرو ": "0",
    "واحد": "1", "واحدة": "1", "١": "1",
    "اتنين": "2", "اثنين": "2", "إثنين": "2", "اثنان": "2", "إثنان": "2", "٢": "2",
    "تلاتة": "3", "ثلاثة": "3", "٣": "3","تلاته": "3","ثلاثه": "3","ثلاثا": "3","تلاتا": "3",
    "اربعة": "4", "أربعة": "4", "٤": "4","اربعه": "4","أربعه": "4","أربع": "4","اربع": "4","اربعا": "4","أربعا": "4",
    "خمسة": "5", "خمسه": "5", "٥": "5", "خمس": "5", "خمسا": "5",
    "ستة": "6", "سته": "6", "٦": "6", "ست": "6", "ستّا": "6", "ستةً": "6",
    "سبعة": "7", "سبعه": "7", "٧": "7", "سبع": "7", "سبعا": "7",
    "ثمانية": "8", "ثمانيه": "8", "٨": "8", "ثمان": "8", "ثمنية": "8", "ثمنيه": "8", "ثمانيا": "8", "ثمن": "8",
    "تسعة": "9", "تسعه": "9", "٩": "9", "تسع": "9", "تسعا": "9",

    # Teens
    "عشرة": "10", "١٠": "10",
    "حداشر": "11", "احد عشر": "11","احداشر": "11",
    "اتناشر": "12", "اثنا عشر": "12",
    "تلتاشر": "13", "ثلاثة عشر": "13",
    "اربعتاشر": "14", "أربعة عشر": "14",
    "خمستاشر": "15", "خمسة عشر": "15",
    "ستاشر": "16", "ستة عشر": "16",
    "سبعتاشر": "17", "سبعة عشر": "17",
    "طمنتاشر": "18", "ثمانية عشر": "18",
    "تسعتاشر": "19", "تسعة عشر": "19",

    # Tens
    "عشرين": "20", "٢٠": "20",
    "تلاتين": "30", "ثلاثين": "30", "٣٠": "30",
    "اربعين": "40", "أربعين": "40", "٤٠": "40",
    "خمسين": "50", "٥٠": "50",
    "ستين": "60", "٦٠": "60",
    "سبعين": "70", "٧٠": "70",
    "تمانين": "80", "ثمانين": "80", "٨٠": "80","تمانون": "80","ثمانون": "80",
    "تسعين": "90", "٩٠": "90",

    # Hundreds
    "مية": "100", "مائة": "100", "مئة": "100", "١٠٠": "100",
    "ميتين": "200", "مائتين": "200",
    "تلاتمية": "300", "ثلاثمائة": "300",
    "اربعمية": "400", "أربعمائة": "400",
    "خمسمية": "500", "خمسمائة": "500",
    "ستمية": "600", "ستمائة": "600",
    "سبعمية": "700", "سبعمائة": "700",
    "تمانمية": "800", "ثمانمائة": "800",
    "تسعمية": "900", "تسعمائة": "900",

    # Thousands
    "ألف": "1000", "الف": "1000", "١٠٠٠": "1000",
    "ألفين": "2000", "الفين": "2000",
    "تلات تلاف": "3000", "ثلاثة آلاف": "3000",
    "اربعة آلاف": "4000", "أربعة آلاف": "4000",
    "خمسة آلاف": "5000",
    "ستة آلاف": "6000",
    "سبعة آلاف": "7000",
    "تمانية آلاف": "8000", "ثمانية آلاف": "8000",
    "تسعة آلاف": "9000",

    # Large numbers
    "عشرة آلاف": "10000",
    "مية ألف": "100000", "مائة ألف": "100000",
    "مليون": "1000000", "١٠٠٠٠٠٠": "1000000",
    "ملايين": "1000000",
    "مليار": "1000000000", "١٠٠٠٠٠٠٠٠٠": "1000000000"
}

def replace_arabic_numbers_nemo(text: str) -> str:
    """Convert Arabic number words in *text* to ASCII digit strings for NeMo.

    Fix: replacements are applied longest-key-first. The previous version
    iterated the mapping in dict order, so a single-word key (e.g.
    "خمسة" -> "5") fired before a multi-word key containing it
    ("خمسة عشر" -> "15"), leaving "5 عشر" and making every multi-word
    mapping unreachable. Keys are also re.escape()d defensively.

    Args:
        text: raw transcript text (may be empty).
    Returns:
        The text with whole-word number matches replaced by digits.
    """
    for word in sorted(arabic_numbers_nemo, key=len, reverse=True):
        text = re.sub(rf"\b{re.escape(word)}\b", arabic_numbers_nemo[word], text)
    return text
+
def convert_arabic_numbers_whisper(sentence: str) -> str:
    """
    Replace Arabic number words in a sentence with digits for Whisper,
    preserving all other words and punctuation.

    Each whitespace-delimited token is passed to pyarabic's text2number
    individually; tokens it cannot convert are kept verbatim. Returns the
    input unchanged when pyarabic is unavailable or the sentence is blank.

    NOTE(review): because conversion is per-token, multi-word numbers such as
    "خمسة عشر" come out as two digits ("5 10"), not "15" — confirm intended.
    """
    if not arabic_numbers_available or not sentence.strip():
        return sentence

    try:
        # Normalization step
        # NOTE(review): these substitutions hit every standalone occurrence,
        # including non-numeric uses of these words — verify acceptable.
        replacements = {
            "اربعة": "أربعة", "اربع": "أربع", "اثنين": "اثنان",
            "اتنين": "اثنان", "ثلاث": "ثلاثة", "خمس": "خمسة",
            "ست": "ستة", "سبع": "سبعة", "ثمان": "ثمانية",
            "تسع": "تسعة", "عشر": "عشرة",
        }
        for wrong, correct in replacements.items():
            sentence = re.sub(rf"\b{wrong}\b", correct, sentence)

        # Split by whitespace but keep spaces
        # (capturing group makes re.split retain the separators, so the
        # final ''.join reassembles the original spacing exactly).
        words = re.split(r'(\s+)', sentence)
        converted_words = []

        for word in words:
            stripped = word.strip()
            if not stripped:  # skip spaces
                converted_words.append(word)
            continue

            try:
                num = text2number(stripped)
                if isinstance(num, int):
                    # text2number presumably yields 0 for non-numbers, so a 0
                    # result is only trusted for the literal word "صفر" —
                    # TODO confirm against pyarabic docs.
                    if num != 0 or stripped == "صفر":
                        converted_words.append(str(num))
                    else:
                        converted_words.append(word)
                else:
                    converted_words.append(word)
            except Exception:
                # Unconvertible token: keep it as-is.
                converted_words.append(word)

        return ''.join(converted_words)

    except Exception as e:
        # Best-effort: on any failure return the original sentence.
        logger.warning(f"Error converting Arabic numbers: {e}")
        return sentence
+
+# Global models
+asr_model_nemo = None
+whisper_model = None
+whisper_processor = None
+whisper_tokenizer = None
+device = None
+torch_dtype = None
+import torch
+from denoiser import pretrained
+
+
def initialize_models():
    """Load the denoiser, NeMo and Whisper models into module globals.

    Populates ``denoiser_model``, ``asr_model_nemo``, ``whisper_model``,
    ``whisper_processor``, ``whisper_tokenizer``, ``device`` and
    ``torch_dtype``. Each model loads independently: a failure leaves the
    corresponding global set to ``None`` so the server can still start with
    whichever backends are available.

    Fixes vs. previous version: removed a duplicated ``device`` assignment,
    narrowed a bare ``except:`` to ``except Exception:``, guarded the
    denoiser load like the other loaders, and dropped a large commented-out
    duplicate of the Whisper loader.
    """
    global asr_model_nemo, whisper_model, whisper_processor, whisper_tokenizer, device, torch_dtype, denoiser_model

    # Device / dtype selection (fp16 only on GPU).
    device = "cuda" if torch.cuda.is_available() else "cpu"
    torch_dtype = torch.float16 if torch.cuda.is_available() else torch.float32
    logger.info(f"Using device: {device}")
    logger.info(f"CUDA available: {torch.cuda.is_available()}")

    # Load DNS64 pretrained denoiser (auto-downloads if not cached).
    # Failure is tolerated: denoise_audio() falls back to the raw audio
    # whenever denoiser_model is None.
    try:
        denoiser_model = pretrained.dns64().to(device)
        denoiser_model.eval()
        logger.info("✓ Denoiser (DNS64) loaded successfully")
    except Exception as e:
        logger.error(f"Failed to load denoiser model: {e}")
        denoiser_model = None

    # Initialize NeMo model (restored from a local .nemo checkpoint).
    logger.info("Loading NeMo FastConformer Arabic ASR model...")
    model_path = "stt_ar_fastconformer_hybrid_large_pcd_v1.0.nemo"

    if os.path.exists(model_path):
        try:
            asr_model_nemo = nemo_asr.models.EncDecCTCModel.restore_from(model_path)
            asr_model_nemo.eval()
            logger.info("✓ NeMo FastConformer model loaded successfully")
        except Exception as e:
            logger.error(f"Failed to load NeMo model: {e}")
            asr_model_nemo = None
    else:
        logger.warning(f"NeMo model not found at: {model_path}")
        asr_model_nemo = None

    # Initialize Whisper model (fine-tuned Arabic checkpoint from the HF hub).
    logger.info("Loading Whisper large-v3 model...")
    MODEL_NAME = "alaatiger989/FT_Arabic_Whisper_V1_1"

    try:
        try:
            # Prefer flash attention when the package is installed.
            import flash_attn  # noqa: F401
            whisper_model = AutoModelForSpeechSeq2Seq.from_pretrained(
                MODEL_NAME,
                torch_dtype=torch_dtype,
                low_cpu_mem_usage=True,
                use_safetensors=True,
                attn_implementation="flash_attention_2"
            )
            logger.info("✓ Whisper loaded with flash attention")
        except Exception:
            # Fall back to the default attention implementation.
            whisper_model = AutoModelForSpeechSeq2Seq.from_pretrained(
                MODEL_NAME,
                torch_dtype=torch_dtype,
                low_cpu_mem_usage=True,
                use_safetensors=True
            )
            logger.info("✓ Whisper loaded with standard attention")

        whisper_model.to(device)
        whisper_processor = AutoProcessor.from_pretrained(MODEL_NAME)

        # Use processor.tokenizer, don't reload separately
        whisper_tokenizer = whisper_processor.tokenizer

        logger.info("✓ Whisper model + tokenizer loaded successfully")

    except Exception as e:
        logger.error(f"Failed to load Whisper model: {e}")
        whisper_model = None
+
+# Initialize models on startup
+initialize_models()
def denoise_audio(audio_data, sample_rate=16000):
    """Run the DNS64 denoiser over a float32 mono signal.

    Args:
        audio_data: 1-D float32 samples (numpy array or similar).
        sample_rate: sample rate forwarded to the denoiser call.
    Returns:
        Denoised samples as float32. On any failure — or when the denoiser
        is unavailable / the input is empty — the input is returned
        unchanged so transcription proceeds best-effort.
    """
    if denoiser_model is None or len(audio_data) == 0:
        return audio_data
    try:
        # Model expects a batched (1, samples) tensor on the active device.
        audio_tensor = torch.tensor(audio_data, dtype=torch.float32, device=device).unsqueeze(0)
        with torch.no_grad():
            # NOTE(review): confirm the denoiser forward accepts a
            # sample_rate keyword — verify against the denoiser package API.
            denoised_tensor = denoiser_model(audio_tensor, sample_rate=sample_rate)[0]
        return denoised_tensor.squeeze().cpu().numpy().astype("float32")
    except Exception as e:
        # Fixed: use the module logger instead of print() so the warning goes
        # through the same logging pipeline as the rest of the file.
        logger.warning(f"Denoiser failed: {e}")
        return audio_data
+# Thread pool for processing
+executor = ThreadPoolExecutor(max_workers=4)
+
+# class JambonzAudioBuffer:
+# def __init__(self, sample_rate=8000, chunk_duration=1.0):
+# self.sample_rate = sample_rate
+# self.chunk_duration = chunk_duration
+# self.chunk_samples = int(chunk_duration * sample_rate)
+
+# self.buffer = np.array([], dtype=np.float32)
+# self.lock = threading.Lock()
+# self.total_audio = np.array([], dtype=np.float32)
+
+# # Voice Activity Detection
+# self.silence_threshold = 0.05
+# self.min_speech_samples = int(0.5 * sample_rate)
+
+# def add_audio(self, audio_data):
+# with self.lock:
+# self.buffer = np.concatenate([self.buffer, audio_data])
+# self.total_audio = np.concatenate([self.total_audio, audio_data])
+
+# def has_chunk_ready(self):
+# with self.lock:
+# return len(self.buffer) >= self.chunk_samples
+
+# def is_speech(self, audio_chunk):
+# """Simple VAD based on energy"""
+# if len(audio_chunk) < self.min_speech_samples:
+# return False
+# energy = np.mean(np.abs(audio_chunk))
+# return energy > self.silence_threshold
+
+# def get_chunk_for_processing(self):
+# """Get audio chunk for processing"""
+# with self.lock:
+# if len(self.buffer) < self.chunk_samples:
+# return None
+# return np.array([1]) # Signal that chunk is ready
+
+# def get_all_audio(self):
+# """Get all accumulated audio"""
+# with self.lock:
+# return self.total_audio.copy()
+
+# def clear(self):
+# with self.lock:
+# self.buffer = np.array([], dtype=np.float32)
+# self.total_audio = np.array([], dtype=np.float32)
+
+# def reset_for_new_segment(self):
+# """Reset buffers for new transcription segment"""
+# with self.lock:
+# self.buffer = np.array([], dtype=np.float32)
+# self.total_audio = np.array([], dtype=np.float32)
+
class JambonzAudioBuffer:
    """Thread-safe PCM accumulator with a simple energy-based VAD.

    Two float32 arrays are kept under a single lock: ``buffer`` holds samples
    pending since the last chunk boundary and ``total_audio`` holds everything
    received for the current segment. Note that get_chunk_for_processing()
    only signals readiness — it does not drain ``buffer``.
    """

    def __init__(self, sample_rate=8000, chunk_duration=1.0):
        self.sample_rate = sample_rate
        self.chunk_duration = chunk_duration
        self.chunk_samples = int(chunk_duration * sample_rate)

        self.lock = threading.Lock()
        self.buffer = np.array([], dtype=np.float32)
        self.total_audio = np.array([], dtype=np.float32)

        # Voice Activity Detection - ADJUSTED FOR WHISPER
        self.silence_threshold = 0.01  # Lower threshold for Whisper
        self.min_speech_samples = int(0.3 * sample_rate)  # 300ms minimum speech

    def add_audio(self, audio_data):
        """Append freshly received samples to both accumulators."""
        with self.lock:
            self.buffer = np.concatenate([self.buffer, audio_data])
            self.total_audio = np.concatenate([self.total_audio, audio_data])
            logger.debug(f"Added {len(audio_data)} audio samples, total: {len(self.total_audio)}")

    def has_chunk_ready(self):
        """True once at least chunk_samples worth of audio is pending."""
        with self.lock:
            ready = len(self.buffer) >= self.chunk_samples
            if ready:
                logger.debug(f"Chunk ready: {len(self.buffer)} >= {self.chunk_samples}")
            return ready

    def is_speech(self, audio_chunk):
        """Energy VAD: speech when RMS or peak exceeds the silence threshold."""
        if len(audio_chunk) < self.min_speech_samples:
            logger.debug(f"Audio too short for VAD: {len(audio_chunk)} < {self.min_speech_samples}")
            return False

        rms = np.sqrt(np.mean(audio_chunk ** 2))
        peak = np.max(np.abs(audio_chunk))
        speech = rms > self.silence_threshold or peak > (self.silence_threshold * 2)

        logger.debug(f"VAD check - RMS: {rms:.4f}, Peak: {peak:.4f}, "
                    f"Threshold: {self.silence_threshold}, Speech: {speech}")

        return speech

    def get_chunk_for_processing(self):
        """Return a marker array once a chunk is ready, else None."""
        with self.lock:
            if len(self.buffer) < self.chunk_samples:
                return None
            logger.debug(f"Returning processing signal, buffer size: {len(self.buffer)}")
            return np.array([1])  # Signal that chunk is ready

    def get_all_audio(self):
        """Copy out everything accumulated for the current segment."""
        with self.lock:
            snapshot = self.total_audio.copy()
            logger.debug(f"Returning {len(snapshot)} total audio samples")
            return snapshot

    def clear(self):
        """Drop all buffered audio."""
        with self.lock:
            self.buffer = np.array([], dtype=np.float32)
            self.total_audio = np.array([], dtype=np.float32)
            logger.debug("Audio buffer cleared")

    def reset_for_new_segment(self):
        """Reset buffers for new transcription segment."""
        with self.lock:
            self.buffer = np.array([], dtype=np.float32)
            self.total_audio = np.array([], dtype=np.float32)
            logger.debug("Audio buffer reset for new segment")
+
def linear16_to_audio(audio_bytes, sample_rate=8000):
    """Decode LINEAR16 (signed 16-bit PCM, native byte order) bytes into
    float32 samples scaled to [-1.0, 1.0). Returns an empty array on any
    decode failure so callers never see an exception."""
    try:
        pcm = np.frombuffer(audio_bytes, dtype=np.int16)
        return pcm.astype(np.float32) / 32768.0
    except Exception as e:
        logger.error(f"Error converting LINEAR16 to audio: {e}")
        return np.array([], dtype=np.float32)
+
def resample_audio(audio_data, source_rate, target_rate):
    """Resample a 1-D signal from source_rate to target_rate.

    Identical rates return the input untouched; the common 8 kHz -> 16 kHz
    case uses cheap sample doubling; everything else falls back to linear
    interpolation on the new sampling grid.
    """
    if source_rate == target_rate:
        return audio_data

    if (source_rate, target_rate) == (8000, 16000):
        # Cheap 2x upsampling: duplicate every sample.
        return np.repeat(audio_data, 2).astype(np.float32)

    # General case: evaluate the signal at fractional source positions.
    ratio = target_rate / source_rate
    positions = np.arange(0, len(audio_data), 1 / ratio)
    positions = positions[positions < len(audio_data)]
    interpolated = np.interp(positions, np.arange(len(audio_data)), audio_data)

    return interpolated.astype(np.float32)
+
def transcribe_with_nemo(audio_data, source_sample_rate=8000, target_sample_rate=16000):
    """Transcribe audio with NeMo FastConformer.

    Pipeline: resample to 16 kHz -> denoise -> write to a temp WAV (the
    transcribe() call is given a file path) -> transcribe -> convert Arabic
    number words to digits. Returns "" for empty/too-short input, a missing
    model, or any error.
    """
    try:
        if len(audio_data) == 0 or asr_model_nemo is None:
            return ""

        # Resample to 16kHz (NeMo models typically expect 16kHz)
        resampled_audio = resample_audio(audio_data, source_sample_rate, target_sample_rate)
        # --- Denoiser added ---
        resampled_audio = denoise_audio(resampled_audio, sample_rate=target_sample_rate)
        # Skip very short audio (< 300 ms after resampling)
        min_samples = int(0.3 * target_sample_rate)
        if len(resampled_audio) < min_samples:
            return ""

        start_time = time.time()

        # Save audio to temporary file (NeMo expects file path)
        with tempfile.NamedTemporaryFile(delete=False, suffix=".wav") as tmp_file:
            sf.write(tmp_file.name, resampled_audio, target_sample_rate)
            tmp_path = tmp_file.name

        try:
            # Transcribe with NeMo
            result = asr_model_nemo.transcribe([tmp_path])

            if result and len(result) > 0:
                # Handle different NeMo result formats
                # (newer versions return objects with .text, older return str).
                if hasattr(result[0], 'text'):
                    raw_text = result[0].text
                elif isinstance(result[0], str):
                    raw_text = result[0]
                else:
                    raw_text = str(result[0])

                if not isinstance(raw_text, str):
                    raw_text = str(raw_text)

                if raw_text and raw_text.strip():
                    # Convert Arabic numbers to digits for NeMo
                    cleaned_text = replace_arabic_numbers_nemo(raw_text)
                    end_time = time.time()

                    if cleaned_text.strip():
                        logger.info(f"NeMo transcription: '{cleaned_text}' (processed in {end_time - start_time:.2f}s)")

                    return cleaned_text.strip()

        finally:
            # Clean up temporary file (delete=False above, so removal is manual)
            if os.path.exists(tmp_path):
                os.remove(tmp_path)

        return ""

    except Exception as e:
        # Best-effort: any failure is logged and yields an empty transcript.
        logger.error(f"Error during NeMo transcription: {e}")
        return ""
+
def transcribe_with_whisper(audio_data, source_sample_rate=8000, target_sample_rate=16000):
    """Transcribe an audio chunk by calling the Whisper model directly.

    Resamples to 16 kHz, builds input features with the processor, runs
    greedy generation, and decodes with the shared tokenizer. Returns "" for
    empty/too-short input, a missing model, or any error.
    """
    try:
        if len(audio_data) == 0 or whisper_model is None:
            return ""

        # Resample from 8kHz to 16kHz for Whisper
        resampled_audio = resample_audio(audio_data, source_sample_rate, target_sample_rate)

        # Ensure minimum length for Whisper
        min_samples = int(0.1 * target_sample_rate)  # 100ms minimum
        if len(resampled_audio) < min_samples:
            return ""

        start_time = time.time()

        # Prepare input features with proper dtype
        input_features = whisper_processor(
            resampled_audio,
            sampling_rate=target_sample_rate,
            return_tensors="pt"
        ).input_features

        # Ensure correct dtype and device
        input_features = input_features.to(device=device, dtype=torch_dtype)

        # Create attention mask to avoid warnings
        # NOTE(review): shape[:-1] masks over the mel dimension, not the
        # frame dimension — confirm against transformers' Whisper generate API.
        attention_mask = torch.ones(
            input_features.shape[:-1],
            dtype=torch.long,
            device=device
        )

        # Generate transcription using model directly (greedy, single beam)
        with torch.no_grad():
            predicted_ids = whisper_model.generate(
                input_features,
                attention_mask=attention_mask,
                max_new_tokens=128,
                do_sample=False,
                # temperature=0.0,
                num_beams=1,
                # NOTE(review): language="english" + task="translate" makes
                # Whisper emit English, while this function is used for
                # Arabic transcripts elsewhere in the file — confirm intended.
                language="english",
                task="translate",
                pad_token_id=whisper_tokenizer.pad_token_id,
                eos_token_id=whisper_tokenizer.eos_token_id
            )

        # Decode the transcription
        transcription = whisper_tokenizer.batch_decode(
            predicted_ids,
            skip_special_tokens=True
        )[0].strip()

        end_time = time.time()

        logger.info(f"Whisper transcription completed in {end_time - start_time:.2f}s: '{transcription}'")
        return transcription

    except Exception as e:
        # Best-effort: any failure is logged and yields an empty transcript.
        logger.error(f"Error during Whisper transcription: {e}")
        return ""
+
+class UnifiedSTTHandler:
    def __init__(self, websocket):
        """Create per-connection state; models themselves are module globals."""
        # jambonz client connection this handler serves
        self.websocket = websocket
        # JambonzAudioBuffer, created when the "start" message arrives
        self.audio_buffer = None
        self.config = {}
        self.running = False
        self.transcription_task = None
        self.use_nemo = False  # Flag to determine which model to use

        # Auto-final detection variables
        self.interim_count = 0
        self.last_interim_time = None
        # NOTE(review): _monitor_for_auto_final hardcodes its own per-model
        # timeouts (2.0s/3.0s), so this attribute appears unused — confirm.
        self.silence_timeout = 2.9
        self.min_interim_count = 1
        self.auto_final_task = None
        self.accumulated_transcript = ""
        self.final_sent = False
        self.segment_number = 0
        self.last_partial = ""

        # Processing tracking
        self.processing_count = 0
+
+ # Add this debugging method to your UnifiedSTTHandler class
+
+ async def add_audio_data(self, audio_bytes):
+ """Add audio data to buffer with enhanced debugging"""
+ if self.audio_buffer and self.running:
+ audio_data = linear16_to_audio(audio_bytes, self.config["sample_rate"])
+ self.audio_buffer.add_audio(audio_data)
+
+ model_name = "NeMo" if self.use_nemo else "Whisper"
+
+ # Debug logging every few audio packets
+ if len(audio_data) > 0:
+ total_samples = len(self.audio_buffer.get_all_audio())
+ total_seconds = total_samples / self.config["sample_rate"]
+
+ # Log every second of audio
+ if int(total_seconds) != getattr(self, '_last_logged_second', -1):
+ logger.info(f"{model_name} - Accumulated {total_seconds:.1f}s of audio ({total_samples} samples)")
+ self._last_logged_second = int(total_seconds)
+
+ # Check if we should have chunks ready
+ chunk_ready = self.audio_buffer.has_chunk_ready()
+ logger.info(f"{model_name} - Chunk ready: {chunk_ready}")
+ # async def start_processing(self, start_message):
+ # """Initialize with start message from jambonz"""
+ # self.config = {
+ # "language": start_message.get("language", "ar-EG"),
+ # "format": start_message.get("format", "raw"),
+ # "encoding": start_message.get("encoding", "LINEAR16"),
+ # "sample_rate": start_message.get("sampleRateHz", 8000),
+ # "interim_results": True, # Always enable for internal processing
+ # "options": start_message.get("options", {})
+ # }
+
+ # # Determine which model to use based on language parameter
+ # language = self.config["language"]
+ # if language == "ar-EG":
+ # logger.info("nemooooooooooooooooooooooooooo")
+ # self.use_nemo = True
+ # model_name = "NeMo FastConformer"
+ # elif language == "ar-EG-whis":
+ # logger.info("whisperrrrrrrrrrrrrrrrrrrrrrrrrrrrr")
+ # self.use_nemo = False
+ # model_name = "Whisper large-v3"
+ # else:
+ # # Default to NeMo for any other Arabic variant
+ # self.use_nemo = True
+ # model_name = "NeMo FastConformer (default)"
+
+ # logger.info(f"STT session started with {model_name} for language: {language}")
+ # logger.info(f"Config: {self.config}")
+
+ # # Check if selected model is available
+ # if self.use_nemo and asr_model_nemo is None:
+ # await self.send_error("NeMo model not available")
+ # return
+ # elif not self.use_nemo and whisper_model is None:
+ # await self.send_error("Whisper model not available")
+ # return
+
+ # # Initialize audio buffer
+ # self.audio_buffer = JambonzAudioBuffer(
+ # sample_rate=self.config["sample_rate"],
+ # chunk_duration=1.0 # 1 second chunks
+ # )
+
+ # # Reset session variables
+ # self.running = True
+ # self.interim_count = 0
+ # self.last_interim_time = None
+ # self.accumulated_transcript = ""
+ # self.final_sent = False
+ # self.segment_number = 0
+ # self.processing_count = 0
+ # self.last_partial = ""
+
+ # # Start background transcription task
+ # self.transcription_task = asyncio.create_task(self._process_audio_chunks())
+
+ # # Start auto-final detection task
+ # self.auto_final_task = asyncio.create_task(self._monitor_for_auto_final())
+
+ # Replace these methods in your UnifiedSTTHandler class
+
+ async def start_processing(self, start_message):
+ """Initialize with start message from jambonz"""
+ self.config = {
+ "language": start_message.get("language", "ar-EG"),
+ "format": start_message.get("format", "raw"),
+ "encoding": start_message.get("encoding", "LINEAR16"),
+ "sample_rate": start_message.get("sampleRateHz", 8000),
+ "interim_results": True, # Always enable for internal processing
+ "options": start_message.get("options", {})
+ }
+
+ # Determine which model to use based on language parameter
+ language = self.config["language"]
+ if language == "ar-EG":
+ logger.info("Selected NeMo FastConformer")
+ self.use_nemo = True
+ model_name = "NeMo FastConformer"
+ elif language == "ar-EG-whis":
+ logger.info("Selected Whisper large-v3")
+ self.use_nemo = False
+ model_name = "Whisper large-v3"
+ else:
+ # Default to NeMo for any other Arabic variant
+ self.use_nemo = True
+ model_name = "NeMo FastConformer (default)"
+
+ logger.info(f"STT session started with {model_name} for language: {language}")
+ logger.info(f"Config: {self.config}")
+
+ # Check if selected model is available
+ if self.use_nemo and asr_model_nemo is None:
+ await self.send_error("NeMo model not available")
+ return
+ elif not self.use_nemo and whisper_model is None:
+ await self.send_error("Whisper model not available")
+ return
+
+ # Initialize audio buffer with model-specific settings
+ if self.use_nemo:
+ chunk_duration = 1.0 # NeMo processes every 1 second
+ else:
+ chunk_duration = 2.0 # Whisper processes every 2 seconds for better accuracy
+
+ self.audio_buffer = JambonzAudioBuffer(
+ sample_rate=self.config["sample_rate"],
+ chunk_duration=chunk_duration
+ )
+
+ # Adjust VAD threshold for Whisper
+ if not self.use_nemo:
+ self.audio_buffer.silence_threshold = 0.005 # Lower threshold for Whisper
+
+ # Reset session variables
+ self.running = True
+ self.interim_count = 0
+ self.last_interim_time = None
+ self.accumulated_transcript = ""
+ self.final_sent = False
+ self.segment_number = 0
+ self.processing_count = 0
+ self.last_partial = ""
+
+ # Start background transcription task
+ self.transcription_task = asyncio.create_task(self._process_audio_chunks())
+
+ # Start auto-final detection task
+ self.auto_final_task = asyncio.create_task(self._monitor_for_auto_final())
+
+ logger.info(f"Background tasks started for {model_name}")
+
+
+
+ async def stop_processing(self):
+ """Stop current processing session"""
+ logger.info("Stopping STT session...")
+ self.running = False
+
+ # Cancel background tasks
+ for task in [self.transcription_task, self.auto_final_task]:
+ if task:
+ task.cancel()
+ try:
+ await task
+ except asyncio.CancelledError:
+ pass
+
+ # Send final transcription if not already sent
+ if not self.final_sent and self.accumulated_transcript.strip():
+ await self.send_transcription(self.accumulated_transcript, is_final=True)
+
+ # Process any remaining audio for comprehensive final transcription
+ if self.audio_buffer:
+ all_audio = self.audio_buffer.get_all_audio()
+ if len(all_audio) > 0 and not self.final_sent:
+ loop = asyncio.get_event_loop()
+
+ if self.use_nemo:
+ final_transcription = await loop.run_in_executor(
+ executor, transcribe_with_nemo, all_audio, self.config["sample_rate"]
+ )
+ else:
+ final_transcription = await loop.run_in_executor(
+ executor, transcribe_with_whisper, all_audio, self.config["sample_rate"]
+ )
+
+ if final_transcription.strip():
+ await self.send_transcription(final_transcription, is_final=True)
+
+ # Clear audio buffer
+ if self.audio_buffer:
+ self.audio_buffer.clear()
+
+ logger.info("STT session stopped")
+
+ async def start_new_segment(self):
+ """Start a new transcription segment"""
+ self.segment_number += 1
+ self.interim_count = 0
+ self.last_interim_time = None
+ self.accumulated_transcript = ""
+ self.final_sent = False
+ self.last_partial = ""
+ self.processing_count = 0
+
+ if self.audio_buffer:
+ self.audio_buffer.reset_for_new_segment()
+
+ logger.info(f"Started new transcription segment #{self.segment_number}")
+
+ async def add_audio_data(self, audio_bytes):
+ """Add audio data to buffer"""
+ if self.audio_buffer and self.running:
+ audio_data = linear16_to_audio(audio_bytes, self.config["sample_rate"])
+ self.audio_buffer.add_audio(audio_data)
+
+ # async def _process_audio_chunks(self):
+ # """Process audio chunks for interim results"""
+ # while self.running:
+ # try:
+ # if self.audio_buffer and self.audio_buffer.has_chunk_ready():
+ # chunk_signal = self.audio_buffer.get_chunk_for_processing()
+ # if chunk_signal is not None:
+ # all_audio = self.audio_buffer.get_all_audio()
+
+ # if len(all_audio) > 0 and self.audio_buffer.is_speech(all_audio[-self.audio_buffer.chunk_samples:]):
+ # loop = asyncio.get_event_loop()
+
+ # # Choose transcription method based on model selection
+ # if self.use_nemo:
+ # transcription = await loop.run_in_executor(
+ # executor, transcribe_with_nemo, all_audio, self.config["sample_rate"]
+ # )
+ # else:
+ # transcription = await loop.run_in_executor(
+ # executor, transcribe_with_whisper, all_audio, self.config["sample_rate"]
+ # )
+
+ # if transcription.strip():
+ # self.processing_count += 1
+ # self.accumulated_transcript = transcription
+
+ # if transcription != self.last_partial or self.interim_count == 0:
+ # self.last_partial = transcription
+ # self.interim_count += 1
+ # self.last_interim_time = time.time()
+ # logger.info(f"Updated interim_count to {self.interim_count} for transcript: '{transcription}'")
+ # else:
+ # self.last_interim_time = time.time()
+
+ # await asyncio.sleep(0.1) # Check every 100ms
+
+ # except Exception as e:
+ # logger.error(f"Error in chunk processing: {e}")
+ # await asyncio.sleep(0.1)
+
+
+ # async def _monitor_for_auto_final(self):
+ # """Monitor for auto-final conditions"""
+ # while self.running:
+ # try:
+ # current_time = time.time()
+
+ # if (self.interim_count >= self.min_interim_count and
+ # self.last_interim_time is not None and
+ # (current_time - self.last_interim_time) >= self.silence_timeout and
+ # not self.final_sent and
+ # self.accumulated_transcript.strip()):
+
+ # logger.info(f"Auto-final triggered for segment #{self.segment_number}")
+
+ # await self.send_transcription(self.accumulated_transcript, is_final=True)
+ # await self.start_new_segment()
+
+ # await asyncio.sleep(0.5) # Check every 500ms
+
+ # except Exception as e:
+ # logger.error(f"Error in auto-final monitoring: {e}")
+ # await asyncio.sleep(0.5)
+
+ # async def _process_audio_chunks(self):
+ # """Process audio chunks for interim results - FIXED for Whisper streaming"""
+ # logger.info(f"Starting audio chunk processing for {'NeMo' if self.use_nemo else 'Whisper'}")
+
+ # while self.running:
+ # try:
+ # if self.audio_buffer and self.audio_buffer.has_chunk_ready():
+ # chunk_signal = self.audio_buffer.get_chunk_for_processing()
+ # if chunk_signal is not None:
+ # all_audio = self.audio_buffer.get_all_audio()
+
+ # # Check if we have enough audio and speech activity
+ # if len(all_audio) > 0:
+ # # Get the latest chunk for VAD check
+ # latest_chunk_start = max(0, len(all_audio) - self.audio_buffer.chunk_samples)
+ # latest_chunk = all_audio[latest_chunk_start:]
+
+ # # For debugging
+ # logger.debug(f"Audio buffer size: {len(all_audio)} samples, Latest chunk: {len(latest_chunk)} samples")
+
+ # if self.audio_buffer.is_speech(latest_chunk):
+ # logger.info(f"Speech detected, processing with {'NeMo' if self.use_nemo else 'Whisper'}")
+
+ # loop = asyncio.get_event_loop()
+
+ # # Choose transcription method based on model selection
+ # if self.use_nemo:
+ # transcription = await loop.run_in_executor(
+ # executor, transcribe_with_nemo, all_audio, self.config["sample_rate"]
+ # )
+ # else:
+ # # For Whisper, ensure we process the accumulated audio
+ # transcription = await loop.run_in_executor(
+ # executor, transcribe_with_whisper, all_audio, self.config["sample_rate"]
+ # )
+
+ # logger.info(f"Transcription result: '{transcription}'")
+
+ # if transcription.strip():
+ # self.processing_count += 1
+ # self.accumulated_transcript = transcription
+
+ # if transcription != self.last_partial or self.interim_count == 0:
+ # self.last_partial = transcription
+ # self.interim_count += 1
+ # self.last_interim_time = time.time()
+ # logger.info(f"Updated interim_count to {self.interim_count} for transcript: '{transcription}'")
+ # else:
+ # self.last_interim_time = time.time()
+ # logger.info("Same transcription, updating time only")
+ # else:
+ # logger.debug("No speech detected in latest chunk")
+
+ # await asyncio.sleep(0.1) # Check every 100ms
+
+ # except Exception as e:
+ # logger.error(f"Error in chunk processing: {e}")
+ # import traceback
+ # traceback.print_exc()
+ # await asyncio.sleep(0.1)
+
+ # async def _monitor_for_auto_final(self):
+ # """Monitor for auto-final conditions - Enhanced logging"""
+ # logger.info("Starting auto-final monitoring")
+
+ # while self.running:
+ # try:
+ # current_time = time.time()
+
+ # if (self.interim_count >= self.min_interim_count and
+ # self.last_interim_time is not None and
+ # (current_time - self.last_interim_time) >= self.silence_timeout and
+ # not self.final_sent and
+ # self.accumulated_transcript.strip()):
+
+ # silence_duration = current_time - self.last_interim_time
+ # logger.info(f"Auto-final triggered for segment #{self.segment_number} - "
+ # f"Interim count: {self.interim_count}, Silence: {silence_duration:.1f}s")
+
+ # await self.send_transcription(self.accumulated_transcript, is_final=True)
+ # await self.start_new_segment()
+
+ # # Debug logging every 5 seconds
+ # if int(current_time) % 5 == 0:
+ # logger.debug(f"Auto-final status - Interim count: {self.interim_count}, "
+ # f"Last interim: {self.last_interim_time}, "
+ # f"Final sent: {self.final_sent}, "
+ # f"Transcript: '{self.accumulated_transcript[:50]}...'")
+
+ # await asyncio.sleep(0.5) # Check every 500ms
+
+ # except Exception as e:
+ # logger.error(f"Error in auto-final monitoring: {e}")
+ # await asyncio.sleep(0.5)
+
+ # async def _process_audio_chunks(self):
+ # """Process audio chunks for interim results - FIXED for both models"""
+ # model_name = "NeMo" if self.use_nemo else "Whisper"
+ # logger.info(f"Starting audio chunk processing for {model_name}")
+
+ # while self.running:
+ # try:
+ # if self.audio_buffer and self.audio_buffer.has_chunk_ready():
+ # chunk_signal = self.audio_buffer.get_chunk_for_processing()
+ # if chunk_signal is not None:
+ # all_audio = self.audio_buffer.get_all_audio()
+
+ # # Debug logging
+ # logger.debug(f"Processing chunk - Total audio: {len(all_audio)} samples")
+
+ # if len(all_audio) > 0:
+ # # Get the latest chunk for VAD check
+ # latest_chunk_start = max(0, len(all_audio) - self.audio_buffer.chunk_samples)
+ # latest_chunk = all_audio[latest_chunk_start:]
+
+ # # Check for speech activity
+ # has_speech = self.audio_buffer.is_speech(latest_chunk)
+ # logger.debug(f"Speech detection result: {has_speech}")
+
+ # if has_speech:
+ # logger.info(f"Processing audio with {model_name} - {len(all_audio)} samples")
+
+ # loop = asyncio.get_event_loop()
+ # start_time = time.time()
+
+ # try:
+ # # Choose transcription method based on model selection
+ # if self.use_nemo:
+ # transcription = await loop.run_in_executor(
+ # executor, transcribe_with_nemo, all_audio, self.config["sample_rate"]
+ # )
+ # else:
+ # # For Whisper, ensure we have enough audio
+ # if len(all_audio) >= int(0.5 * 16000): # At least 0.5 seconds at 16kHz
+ # transcription = await loop.run_in_executor(
+ # executor, transcribe_with_whisper, all_audio, self.config["sample_rate"]
+ # )
+ # else:
+ # transcription = ""
+ # logger.debug("Whisper: Not enough audio for transcription")
+
+ # process_time = time.time() - start_time
+ # logger.info(f"{model_name} processing took {process_time:.2f}s, result: '{transcription}'")
+
+ # if transcription and transcription.strip():
+ # self.processing_count += 1
+ # self.accumulated_transcript = transcription
+
+ # if transcription != self.last_partial or self.interim_count == 0:
+ # self.last_partial = transcription
+ # self.interim_count += 1
+ # self.last_interim_time = time.time()
+ # logger.info(f"Updated interim_count to {self.interim_count} for transcript: '{transcription}'")
+ # else:
+ # self.last_interim_time = time.time()
+ # logger.debug("Same transcription, updating time only")
+ # else:
+ # logger.debug(f"{model_name} returned empty transcription")
+
+ # except Exception as e:
+ # logger.error(f"Error in {model_name} transcription: {e}")
+ # else:
+ # logger.debug("No speech detected in latest chunk")
+
+ # # Different sleep intervals for different models
+ # sleep_interval = 0.1 if self.use_nemo else 0.2 # Whisper can be less frequent
+ # await asyncio.sleep(sleep_interval)
+
+ # except Exception as e:
+ # logger.error(f"Error in chunk processing: {e}")
+ # import traceback
+ # traceback.print_exc()
+ # await asyncio.sleep(1) # Longer sleep on error
+
+ # Also add this to the beginning of _process_audio_chunks method:
+
+ async def _process_audio_chunks(self):
+ """Process audio chunks for interim results - with debugging"""
+ model_name = "NeMo" if self.use_nemo else "Whisper"
+ logger.info(f"Starting audio chunk processing for {model_name}")
+
+ chunk_count = 0
+
+ while self.running:
+ try:
+ if self.audio_buffer and self.audio_buffer.has_chunk_ready():
+ chunk_count += 1
+ logger.info(f"{model_name} - Processing chunk #{chunk_count}")
+
+ chunk_signal = self.audio_buffer.get_chunk_for_processing()
+ if chunk_signal is not None:
+ all_audio = self.audio_buffer.get_all_audio()
+
+ logger.info(f"{model_name} - Got {len(all_audio)} samples for processing")
+
+ if len(all_audio) > 0:
+ # Get the latest chunk for VAD check
+ latest_chunk_start = max(0, len(all_audio) - self.audio_buffer.chunk_samples)
+ latest_chunk = all_audio[latest_chunk_start:]
+
+ # Check for speech activity
+ has_speech = self.audio_buffer.is_speech(latest_chunk)
+ logger.info(f"{model_name} - Speech detected: {has_speech}")
+
+ if has_speech:
+ logger.info(f"{model_name} - Starting transcription...")
+
+ loop = asyncio.get_event_loop()
+ start_time = time.time()
+
+ try:
+ # Choose transcription method based on model selection
+ if self.use_nemo:
+ transcription = await loop.run_in_executor(
+ executor, transcribe_with_nemo, all_audio, self.config["sample_rate"]
+ )
+ else:
+ transcription = await loop.run_in_executor(
+ executor, transcribe_with_whisper, all_audio, self.config["sample_rate"]
+ )
+
+ process_time = time.time() - start_time
+ logger.info(f"{model_name} - Transcription completed in {process_time:.2f}s: '{transcription}'")
+
+ if transcription and transcription.strip():
+ self.processing_count += 1
+ self.accumulated_transcript = transcription
+
+ if transcription != self.last_partial or self.interim_count == 0:
+ self.last_partial = transcription
+ self.interim_count += 1
+ self.last_interim_time = time.time()
+ logger.info(f"{model_name} - Updated interim_count to {self.interim_count}")
+ else:
+ self.last_interim_time = time.time()
+ logger.info(f"{model_name} - Same transcription, updating time only")
+ else:
+ logger.info(f"{model_name} - No transcription result")
+
+ except Exception as e:
+ logger.error(f"{model_name} - Transcription error: {e}")
+ import traceback
+ traceback.print_exc()
+ else:
+ logger.debug(f"{model_name} - No speech in chunk")
+ else:
+ logger.warning(f"{model_name} - Chunk signal was None")
+ else:
+ # Log why chunk is not ready
+ if self.audio_buffer:
+ current_size = len(self.audio_buffer.buffer)
+ required_size = self.audio_buffer.chunk_samples
+ if current_size > 0:
+ logger.debug(f"{model_name} - Buffer: {current_size}/{required_size} samples")
+
+ await asyncio.sleep(0.1)
+
+ except Exception as e:
+ logger.error(f"{model_name} - Error in chunk processing: {e}")
+ import traceback
+ traceback.print_exc()
+ await asyncio.sleep(1)
+
+ async def _monitor_for_auto_final(self):
+ """Monitor for auto-final conditions with model-specific timeouts"""
+ model_name = "NeMo" if self.use_nemo else "Whisper"
+ timeout = 2.0 if self.use_nemo else 3.0 # Longer timeout for Whisper
+
+ logger.info(f"Starting auto-final monitoring for {model_name} (timeout: {timeout}s)")
+
+ while self.running:
+ try:
+ current_time = time.time()
+
+ if (self.interim_count >= self.min_interim_count and
+ self.last_interim_time is not None and
+ (current_time - self.last_interim_time) >= timeout and
+ not self.final_sent and
+ self.accumulated_transcript.strip()):
+
+ silence_duration = current_time - self.last_interim_time
+ logger.info(f"Auto-final triggered for segment #{self.segment_number} ({model_name}) - "
+ f"Interim count: {self.interim_count}, Silence: {silence_duration:.1f}s")
+
+ await self.send_transcription(self.accumulated_transcript, is_final=True)
+ await self.start_new_segment()
+
+ await asyncio.sleep(0.5) # Check every 500ms
+
+ except Exception as e:
+ logger.error(f"Error in auto-final monitoring: {e}")
+ await asyncio.sleep(0.5)
+
+
+
+ async def send_transcription(self, text, is_final=True, confidence=0.9):
+ """Send transcription in jambonz format"""
+ try:
+ # Apply number conversion only for Whisper
+ if not self.use_nemo and is_final:
+ original_text = text
+ converted_text = convert_arabic_numbers_whisper(text)
+
+ if original_text != converted_text:
+ logger.info(f"Whisper - Arabic numbers converted: '{original_text}' -> '{converted_text}'")
+ text = converted_text
+
+ message = {
+ "type": "transcription",
+ "is_final": True, # Always send as final
+ "alternatives": [
+ {
+ "transcript": text,
+ "confidence": confidence
+ }
+ ],
+ "language": self.config.get("language", "ar-EG"),
+ "channel": 1
+ }
+
+ await self.websocket.send(json.dumps(message))
+ self.final_sent = True
+
+ model_name = "NeMo" if self.use_nemo else "Whisper"
+ logger.info(f"Sent FINAL transcription ({model_name}): '{text}'")
+
+ except Exception as e:
+ logger.error(f"Error sending transcription: {e}")
+
+ async def send_error(self, error_message):
+ """Send error message in jambonz format"""
+ try:
+ message = {
+ "type": "error",
+ "error": error_message
+ }
+ await self.websocket.send(json.dumps(message))
+ logger.error(f"Sent error: {error_message}")
+ except Exception as e:
+ logger.error(f"Error sending error message: {e}")
+
async def handle_jambonz_websocket(websocket):
    """Serve one jambonz STT WebSocket connection.

    Text frames carry JSON control messages (``start`` / ``stop``); binary
    frames carry raw LINEAR16 audio, which is forwarded to the session
    handler.  The handler is always stopped in the ``finally`` block so
    background tasks and buffers are released even on abnormal disconnects.
    """
    client_id = f"jambonz_{id(websocket)}"
    logger.info(f"New unified STT connection: {client_id}")

    handler = UnifiedSTTHandler(websocket)

    try:
        async for message in websocket:
            try:
                if isinstance(message, str):
                    data = json.loads(message)
                    message_type = data.get("type")

                    if message_type == "start":
                        logger.info(f"Received start message: {data}")
                        await handler.start_processing(data)

                    elif message_type == "stop":
                        logger.info("Received stop message - closing WebSocket")
                        await handler.stop_processing()
                        await websocket.close(code=1000, reason="Session stopped by client")
                        break

                    else:
                        logger.warning(f"Unknown message type: {message_type}")
                        await handler.send_error(f"Unknown message type: {message_type}")

                else:
                    # Binary frame: audio. Reject audio arriving outside an
                    # active session instead of buffering it silently.
                    if not handler.running or handler.audio_buffer is None:
                        logger.warning("Received audio data outside of active session")
                        await handler.send_error("Received audio before start message or after stop")
                        continue

                    await handler.add_audio_data(message)

            except json.JSONDecodeError as e:
                logger.error(f"JSON decode error: {e}")
                await handler.send_error(f"Invalid JSON: {str(e)}")
            except Exception as e:
                logger.error(f"Error processing message: {e}")
                await handler.send_error(f"Processing error: {str(e)}")

    except websockets.exceptions.ConnectionClosed:
        logger.info(f"Unified STT connection closed: {client_id}")
    except Exception as e:
        logger.error(f"Unified STT WebSocket error: {e}")
        try:
            await handler.send_error(str(e))
        # Best-effort notification only — the socket may already be gone.
        # Was a bare `except:`, which would also have swallowed
        # KeyboardInterrupt / SystemExit / CancelledError.
        except Exception:
            pass
    finally:
        if handler.running:
            await handler.stop_processing()
        logger.info(f"Unified STT connection ended: {client_id}")
+
async def main():
    """Start the Unified Arabic STT WebSocket server"""
    logger.info("Starting Unified Arabic STT WebSocket server on port 3007...")

    # Collect the loaded back-ends; refuse to start with none available.
    candidates = (
        (asr_model_nemo, "NeMo FastConformer (ar-EG)"),
        (whisper_model, "Whisper large-v3 (ar-EG-whis)"),
    )
    models_available = [label for model, label in candidates if model is not None]

    if not models_available:
        logger.error("No models available! Please check model paths and installations.")
        return

    # Bind the WebSocket server on all interfaces, port 3007.
    server = await websockets.serve(
        handle_jambonz_websocket,
        "0.0.0.0",
        3007,
        ping_interval=20,
        ping_timeout=10,
        close_timeout=10
    )

    # Startup summary for the operator log.
    for banner_line in (
        "Unified Arabic STT WebSocket server started on ws://0.0.0.0:3007",
        "Ready to handle jambonz STT requests with both models",
        "ROUTING:",
        "- language: 'ar-EG' → NeMo FastConformer (with built-in number conversion)",
        "- language: 'ar-EG-whis' → Whisper large-v3 (with pyarabic number conversion)",
        "FEATURES:",
        "- Continuous transcription with segmentation",
        "- Voice Activity Detection",
        "- Auto-final detection (2s silence timeout)",
        "- Model-specific number conversion",
    ):
        logger.info(banner_line)
    logger.info(f"AVAILABLE MODELS: {', '.join(models_available)}")

    # Block until the server shuts down.
    await server.wait_closed()
+
+if __name__ == "__main__":
+ print("=" * 80)
+ print("Unified Arabic STT Server (NeMo + Whisper)")
+ print("=" * 80)
+ print("WebSocket Port: 3007")
+ print("Protocol: jambonz STT API")
+ print("Audio Format: LINEAR16 PCM @ 8kHz → 16kHz")
+ print()
+ print("LANGUAGE ROUTING:")
+ print("- 'ar-EG' → NeMo FastConformer")
+ print(" • Built-in Arabic number word to digit conversion")
+ print(" • Optimized for Arabic dialects")
+ print("- 'ar-EG-whis' → Whisper large-v3")
+ print(" • pyarabic library number conversion (final transcripts only)")
+ print(" • OpenAI Whisper model")
+ print()
+ print("FEATURES:")
+ print("- Automatic model selection based on language parameter")
+ print("- Voice Activity Detection")
+ print("- Auto-final detection (2 seconds silence)")
+ print("- Model-specific number conversion strategies")
+ print("- Continuous transcription with segmentation")
+ print()
+
+ # Check model availability for startup info
+ nemo_status = "✓ Available" if asr_model_nemo is not None else "✗ Not Available"
+ whisper_status = "✓ Available" if whisper_model is not None else "✗ Not Available"
+ arabic_numbers_status = "✓ Available" if arabic_numbers_available else "✗ Not Available (install pyarabic)"
+
+ print("MODEL STATUS:")
+ print(f"- NeMo FastConformer: {nemo_status}")
+ print(f"- Whisper large-v3: {whisper_status}")
+ print(f"- pyarabic (Whisper numbers): {arabic_numbers_status}")
+ print("=" * 80)
+
+ try:
+ asyncio.run(main())
+ except KeyboardInterrupt:
+ print("\nShutting down unified server...")
+ except Exception as e:
+ print(f"Server error: {e}")
\ No newline at end of file
diff --git a/denoiser_model.py b/denoiser_model.py
new file mode 100644
index 0000000000000000000000000000000000000000..57e0b1c7f2664ab610e28f12dadaef0042a4fbbb
--- /dev/null
+++ b/denoiser_model.py
@@ -0,0 +1,8 @@
import torch
from denoiser import pretrained

# Prefer GPU when available; the denoiser runs much slower on CPU.
device = "cuda" if torch.cuda.is_available() else "cpu"

# Load DNS64 pretrained model (auto-downloads if not cached)
denoiser_model = pretrained.dns64().to(device)
# Inference-only module: disable dropout / batch-norm training behavior.
denoiser_model.eval()
\ No newline at end of file
diff --git a/improved_asr_web_ui.html b/improved_asr_web_ui.html
new file mode 100644
index 0000000000000000000000000000000000000000..30fb9674cf8ec2f3e3a3022cf30fa69491fb9a0c
--- /dev/null
+++ b/improved_asr_web_ui.html
@@ -0,0 +1,729 @@
+
+
+
+
+
+ ASR WebSocket Testing Client with Sample Rate Analysis
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
Disconnected
+
+
+
+
0
+
Calculated Sample Rate (Hz)
+
+
+
+
0
+
Audio Chunks Sent
+
+
+
0.0s
+
Recording Duration
+
+
+
+
+
+
+
+
+
+
Server Responses:
+
Waiting for connection...
+
+
+
+
Debug Console:
+
Ready to connect...
+
+
+
+
+
+
\ No newline at end of file
diff --git a/pretrained_models/asr-whisper-large-v2-commonvoice-ar/hyperparams.yaml b/pretrained_models/asr-whisper-large-v2-commonvoice-ar/hyperparams.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..75f528dc35034d59a0c4524889f8df07428744c5
--- /dev/null
+++ b/pretrained_models/asr-whisper-large-v2-commonvoice-ar/hyperparams.yaml
@@ -0,0 +1,58 @@
+# ################################
+# Model: Whisper (Encoder-Decoder) + NLL
+# Augmentation: TimeDomainSpecAugment
+# Authors: Pooneh Mousavi 2022
+# ################################
+
+
+# URL for the biggest Fairseq english whisper model.
+whisper_hub: openai/whisper-large-v2
+
+# Normalize inputs with
+# the same normalization done in the paper. Refer to Appendix C for further information.
+normalized_transcripts: True
+
+
+language: arabic
+
+auto_mix_prec: False
+sample_rate: 16000
+
+# Decoding parameters
+min_decode_ratio: 0.0
+max_decode_ratio: 1.0
+test_beam_size: 8
+
+# Model parameters
+freeze_whisper: True
+freeze_encoder: True
+
+
whisper: !new:speechbrain.lobes.models.huggingface_transformers.whisper.Whisper
    source: !ref <whisper_hub>
    freeze: !ref <freeze_whisper>
    freeze_encoder: !ref <freeze_encoder>
    save_path: whisper_checkpoints
    encoder_only: False

decoder: !new:speechbrain.decoders.seq2seq.S2SWhisperGreedySearcher
    model: !ref <whisper>
    min_decode_ratio: !ref <min_decode_ratio>
    max_decode_ratio: !ref <max_decode_ratio>

# test_beam_searcher: !new:speechbrain.decoders.seq2seq.S2SWhisperBeamSearcher
#     module: [!ref <whisper>]
#     min_decode_ratio: !ref <min_decode_ratio>
#     max_decode_ratio: !ref <max_decode_ratio>
#     beam_size: !ref <test_beam_size>


modules:
    whisper: !ref <whisper>
    decoder: !ref <decoder>


pretrainer: !new:speechbrain.utils.parameter_transfer.Pretrainer
    loadables:
        whisper: !ref <whisper>
+
diff --git a/pretrained_models/asr-whisper-large-v2-commonvoice-ar/whisper.ckpt b/pretrained_models/asr-whisper-large-v2-commonvoice-ar/whisper.ckpt
new file mode 100644
index 0000000000000000000000000000000000000000..2340c319c7fb035ae67c2fd381f63e1479bc9964
--- /dev/null
+++ b/pretrained_models/asr-whisper-large-v2-commonvoice-ar/whisper.ckpt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4ac653766f62d8701b6fe6177a77505f98564e6c5f4c03948f2c87ad21db18c4
+size 6173767281
diff --git a/requirements_denoiser.txt b/requirements_denoiser.txt
new file mode 100644
index 0000000000000000000000000000000000000000..2d266d2e23666289d4f42235d98703a6d34f7b91
--- /dev/null
+++ b/requirements_denoiser.txt
@@ -0,0 +1,3 @@
# Denoiser dependencies — install with: pip install -r requirements_denoiser.txt
git+https://github.com/facebookresearch/denoiser

noisereduce
\ No newline at end of file
diff --git a/speech_brain_whisper_denoiser.py b/speech_brain_whisper_denoiser.py
new file mode 100644
index 0000000000000000000000000000000000000000..601db872b8b18bd699dd0523dfebfbe2dced087c
--- /dev/null
+++ b/speech_brain_whisper_denoiser.py
@@ -0,0 +1,741 @@
+# import torch
+# import asyncio
+# import websockets
+# import json
+# import threading
+# import numpy as np
+# from transformers import AutoModelForSpeechSeq2Seq, AutoProcessor, WhisperTokenizer, pipeline , WhisperForConditionalGeneration, WhisperProcessor
+# import subprocess
+# import logging
+# import time
+# from concurrent.futures import ThreadPoolExecutor
+# import struct
+# import re
+# 3 - 10 - 2025
+import torch
+import asyncio
+import websockets
+import json
+import threading
+import numpy as np
+from transformers import pipeline
+import subprocess
+import logging
+import time
+from concurrent.futures import ThreadPoolExecutor
+import re
+import tempfile
+import os
+import soundfile as sf
+from pathlib import Path
+# --- Denoiser added ---
# Feature probe: optional noisereduce-based spectral denoising.
try:
    import noisereduce as nr
    denoiser_available = True
    print("Denoiser available (using noisereduce)")
except ImportError:
    denoiser_available = False
    print("noisereduce not available - install with: pip install noisereduce")
##############################################################################################
# Arabic number conversion imports
# Feature probe: converting spelled-out Arabic numbers to digits needs pyarabic.
try:
    from pyarabic.number import text2number
    arabic_numbers_available = True
    print("Arabic number conversion available")
except ImportError:
    arabic_numbers_available = False
    print("pyarabic not available - install with: pip install pyarabic")
    print("Arabic numbers will not be converted to digits")
+
+# Set up logging
+logging.basicConfig(level=logging.INFO)
+logger = logging.getLogger(__name__)
+# 3 - 10 - 2025
+# def denoise_audio(audio_data, sample_rate=16000):
+# """Apply noise reduction to audio using noisereduce."""
+# if not denoiser_available or len(audio_data) == 0:
+# return audio_data
+# try:
+# reduced = nr.reduce_noise(y=audio_data, sr=sample_rate)
+# return reduced.astype(np.float32)
+# except Exception as e:
+# logger.warning(f"Denoiser failed: {e}")
+# return audio_data
+#############################################################################################
# Spelling normalization applied before word-by-word number parsing.
# Maps common dialectal / undiacritized variants to the forms pyarabic's
# text2number expects. Hoisted to module scope so the table is not rebuilt
# on every call. Order matters: longer variants are listed before shorter
# prefixes (e.g. "اربعة" before "اربع").
_NUMBER_SPELLING_FIXES = {
    "اربعة": "أربعة",
    "اربع": "أربع",
    "اثنين": "اثنان",
    "اتنين": "اثنان",  # Egyptian variant
    "ثلاث": "ثلاثة",
    "خمس": "خمسة",
    "ست": "ستة",
    "سبع": "سبعة",
    "ثمان": "ثمانية",
    "تسع": "تسعة",
    "عشر": "عشرة",
}


def convert_arabic_numbers_in_sentence(sentence: str) -> str:
    """Replace Arabic number words in a sentence with digits.

    Preserves all other words, punctuation, and the exact whitespace between
    tokens. Handles common spelling variants and zero ("صفر") explicitly.
    Words that pyarabic cannot parse (or when pyarabic is unavailable) are
    passed through unchanged; on any unexpected error the original sentence
    is returned.
    """
    try:
        # Normalize spelling variants so text2number recognizes them.
        # \b works for Arabic letters because re treats them as word chars.
        for wrong, correct in _NUMBER_SPELLING_FIXES.items():
            sentence = re.sub(rf"\b{wrong}\b", correct, sentence)

        # Split on whitespace but keep the separators so the original
        # spacing is reproduced exactly in the output.
        words = re.split(r'(\s+)', sentence)
        converted_words = []

        for word in words:
            stripped = word.strip()
            if not stripped:  # whitespace separator — keep verbatim
                converted_words.append(word)
                continue

            try:
                num = text2number(stripped)
                # text2number returns 0 for unparseable words, so only accept
                # a zero result when the word literally is "صفر".
                if isinstance(num, int) and (num != 0 or stripped == "صفر"):
                    converted_words.append(str(num))
                else:
                    converted_words.append(word)
            except Exception:
                # Not a number word (or pyarabic missing) — keep as-is.
                converted_words.append(word)

        return ''.join(converted_words)

    except Exception as e:
        logger.warning(f"Error converting Arabic numbers: {e}")
        return sentence
+
+
+# Try to install flash-attn if not available
+try:
+ import flash_attn
+ use_flash_attn = True
+except ImportError:
+ print("Flash attention not available, using standard attention")
+ use_flash_attn = False
+ try:
+ subprocess.run(
+ "pip install websockets",
+ shell=True,
+ check=False
+ )
+ subprocess.run(
+ "pip install flash-attn --no-build-isolation",
+ shell=True,
+ check=False
+ )
+ except:
+ pass
+
# Pick the compute device once at import time; reused by the denoiser and ASR.
device = "cuda" if torch.cuda.is_available() else "cpu"
# --- Facebook Denoiser added ---
try:
    import torchaudio
    from denoiser import pretrained
    # Load DNS64 pretrained model (auto-downloads if not cached)
    denoiser_model = pretrained.dns64().to(device)
    denoiser_model.eval()
    denoiser_available = True
    print("facebook/denoiser loaded successfully")
except ImportError as e:
    # NOTE(review): this overwrites the noisereduce-based denoiser_available
    # flag set earlier in the file; the Facebook denoiser wins either way.
    denoiser_available = False
    print("facebook/denoiser not available - install with: pip install denoiser torchaudio")
    denoiser_model = None


torch_dtype = torch.float16 if torch.cuda.is_available() else torch.float32
# NOTE(review): this MODEL_NAME is immediately overwritten below — the
# assignment here is dead code kept for reference.
MODEL_NAME = "alaatiger989/FT_Arabic_Whisper_V1_1"#"openai/whisper-large-v3-turbo"

print(f"Using device: {device}")
print(f"CUDA available: {torch.cuda.is_available()}")
MODEL_NAME = "speechbrain/asr-whisper-large-v2-commonvoice-ar"
# Replace your pipeline definition
from speechbrain.inference.ASR import WhisperASR

# Load the SpeechBrain model at import time (downloads the ~6 GB checkpoint
# into savedir on first run).
model = WhisperASR.from_hparams(
    source="speechbrain/asr-whisper-large-v2-commonvoice-ar",
    savedir="pretrained_models/asr-whisper-large-v2-commonvoice-ar",
    run_opts={"device": "cuda"} if torch.cuda.is_available() else {}
)
+
+
def denoise_audio(audio_data, sample_rate=16000):
    """Denoise a mono float waveform with the facebook/denoiser DNS64 model.

    Returns the input unchanged when the model is not loaded, the input is
    empty, or inference fails — denoising is strictly best-effort.
    """
    if denoiser_model is None or len(audio_data) == 0:
        return audio_data
    try:
        batch = torch.tensor(audio_data, dtype=torch.float32, device=device).unsqueeze(0)
        with torch.no_grad():
            # The model consumes the raw waveform batch; it takes no
            # sample_rate argument.
            cleaned = denoiser_model(batch)[0]
        return cleaned.squeeze().cpu().numpy().astype("float32")
    except Exception as e:
        print(f"[WARN] Denoiser failed: {e}")
        return audio_data
+
# Thread pool for processing audio
# Shared by all websocket sessions; transcription runs here, off the event loop.
executor = ThreadPoolExecutor(max_workers=4)
+
class JambonzAudioBuffer:
    """Thread-safe accumulator for incoming PCM audio from a jambonz stream.

    Maintains two views of the stream: a rolling chunk buffer used only to
    decide when enough new audio has arrived, and the complete audio history
    used for whole-utterance transcription.
    """

    def __init__(self, sample_rate=8000, chunk_duration=1.0):
        self.sample_rate = sample_rate
        self.chunk_duration = chunk_duration
        self.chunk_samples = int(chunk_duration * sample_rate)

        self.buffer = np.array([], dtype=np.float32)
        self.lock = threading.Lock()
        self.total_audio = np.array([], dtype=np.float32)

        # Simple energy-based voice activity detection thresholds.
        self.silence_threshold = 0.01
        self.min_speech_samples = int(0.3 * sample_rate)  # 300 ms minimum speech

    def add_audio(self, audio_data):
        """Append new samples to both the chunk buffer and the full history."""
        with self.lock:
            self.buffer = np.concatenate([self.buffer, audio_data])
            self.total_audio = np.concatenate([self.total_audio, audio_data])

    def has_chunk_ready(self):
        """Return True once at least one chunk's worth of audio is buffered."""
        with self.lock:
            return len(self.buffer) >= self.chunk_samples

    def is_speech(self, audio_chunk):
        """Energy-based VAD: True when the chunk is long and loud enough."""
        if len(audio_chunk) < self.min_speech_samples:
            return False
        return np.mean(np.abs(audio_chunk)) > self.silence_threshold

    def get_chunk_for_processing(self):
        """Signal (without consuming audio) that a chunk-sized amount arrived.

        For interim results we only need to know that enough audio has
        accumulated — the buffer itself is never drained here. Returns a
        dummy one-element array when ready, otherwise None.
        """
        with self.lock:
            if len(self.buffer) < self.chunk_samples:
                return None
            return np.array([1])

    def get_all_audio(self):
        """Return a copy of every sample received so far."""
        with self.lock:
            return self.total_audio.copy()

    def clear(self):
        """Drop all buffered and accumulated audio."""
        with self.lock:
            self.buffer = np.array([], dtype=np.float32)
            self.total_audio = np.array([], dtype=np.float32)
+
def linear16_to_audio(audio_bytes, sample_rate=8000):
    """Decode LINEAR16 PCM bytes (the jambonz wire format) to float32.

    Output samples are normalized to [-1.0, 1.0). Returns an empty array on
    decode failure.
    """
    try:
        pcm = np.frombuffer(audio_bytes, dtype=np.int16)
        # Scale signed 16-bit samples into the unit range.
        return pcm.astype(np.float32) / 32768.0
    except Exception as e:
        logger.error(f"Error converting LINEAR16 to audio: {e}")
        return np.array([], dtype=np.float32)
+
def resample_audio(audio_data, source_rate, target_rate):
    """Resample a waveform by linear interpolation (e.g. 8 kHz -> 16 kHz).

    A naive resampler with no anti-aliasing filter; adequate for upsampling
    telephony audio for ASR. Returns the input untouched when the rates match.
    """
    if source_rate == target_rate:
        return audio_data
    ratio = target_rate / source_rate
    # Fractional sample positions in the source signal.
    positions = np.arange(0, len(audio_data), 1 / ratio)
    positions = positions[positions < len(audio_data)]
    return np.interp(positions, np.arange(len(audio_data)), audio_data).astype(np.float32)
+
+
import os
import tempfile
import soundfile as sf
import logging

# NOTE(review): these re-imports and this logger re-assignment duplicate the
# ones at the top of the file; harmless but redundant.
logger = logging.getLogger(__name__)
from pathlib import Path
import uuid
import shutil
# Project-level temp folder
# Created at import time next to this file; scratch space for audio files.
PROJECT_DIR = Path(__file__).parent.resolve()
AUDIO_TMP_DIR = PROJECT_DIR / "temp_audio"
AUDIO_TMP_DIR.mkdir(exist_ok=True)
def transcribe_chunk_direct(audio_data, source_sample_rate=8000, target_sample_rate=16000):
    """Resample, denoise, and transcribe a float32 waveform with SpeechBrain.

    Returns the transcription as a single stripped string, or "" for empty or
    too-short input and on any error. Runs synchronously; callers dispatch it
    to the thread-pool executor.
    """
    try:
        if len(audio_data) == 0:
            return ""

        # Step 1: Resample (telephony 8 kHz -> model's 16 kHz)
        resampled_audio = resample_audio(audio_data, source_sample_rate, target_sample_rate)

        # Step 2: Denoise (best-effort; returns input unchanged on failure)
        resampled_audio = denoise_audio(resampled_audio)

        # Step 3: Check minimum length (100ms)
        min_samples = int(0.1 * target_sample_rate)
        if len(resampled_audio) < min_samples:
            return ""

        # Step 4: Convert numpy -> torch tensor
        waveform = torch.tensor(resampled_audio, dtype=torch.float32).unsqueeze(0)  # [1, T]

        # Step 5: Create wav_lens (normalized length)
        wav_lens = torch.tensor([1.0])  # full length, no padding

        # Step 6: Transcribe
        words, tokens = model.transcribe_batch(waveform, wav_lens)

        # Step 7: Convert list of words to a sentence
        # NOTE(review): assumes words[0] is a list of word strings; if the
        # model instead returns a plain string here, " ".join would insert a
        # space between every character — confirm against WhisperASR output.
        transcription = " ".join(words[0]) if words and len(words) > 0 else ""

        logger.info(f"SpeechBrain transcription: '{transcription}'")
        return transcription.strip()

    except Exception as e:
        logger.error(f"Error during SpeechBrain transcription: {e}")
        return ""
+
+
+# def transcribe_chunk_direct(audio_data, source_sample_rate=8000, target_sample_rate=16000):
+# """Transcribe audio chunk using model's generate method directly"""
+# try:
+# if len(audio_data) == 0:
+# return ""
+
+# # Resample from 8kHz to 16kHz for Whisper
+# resampled_audio = resample_audio(audio_data, source_sample_rate, target_sample_rate)
+
+# # Ensure minimum length for Whisper
+# min_samples = int(0.1 * target_sample_rate) # 100ms minimum
+# if len(resampled_audio) < min_samples:
+# return ""
+
+# start_time = time.time()
+
+# # Prepare input features with proper dtype
+# input_features = processor(
+# resampled_audio,
+# sampling_rate=target_sample_rate,
+# return_tensors="pt"
+# ).input_features
+
+# # Ensure correct dtype and device
+# input_features = input_features.to(device=device, dtype=torch_dtype)
+
+# # Create attention mask to avoid warnings
+# attention_mask = torch.ones(
+# input_features.shape[:-1],
+# dtype=torch.long,
+# device=device
+# )
+
+# # Generate transcription using model directly
+# with torch.no_grad():
+# predicted_ids = model.generate(
+# input_features,
+# attention_mask=attention_mask,
+# max_new_tokens=128,
+# do_sample=False,
+# temperature=0.0,
+# num_beams=1,
+# language="ar",
+# task="transcribe",
+# pad_token_id=tokenizer.pad_token_id,
+# eos_token_id=tokenizer.eos_token_id
+# )
+
+# # Decode the transcription
+# transcription = tokenizer.batch_decode(
+# predicted_ids,
+# skip_special_tokens=True
+# )[0].strip()
+
+# end_time = time.time()
+
+# logger.info(f"Direct transcription completed in {end_time - start_time:.2f}s: '{transcription}'")
+# return transcription
+
+# except Exception as e:
+# logger.error(f"Error during direct transcription: {e}")
+# return ""
+
class JambonzSTTHandler:
    """Per-connection state machine for the jambonz custom-STT protocol.

    Owns the audio buffer, a background interim-transcription task, and an
    auto-final watchdog that promotes the accumulated transcript to a final
    result after a period of silence following interim activity.
    """

    def __init__(self, websocket):
        self.websocket = websocket
        self.audio_buffer = None  # created when the "start" message arrives
        self.config = {}
        self.running = True
        self.transcription_task = None
        self.full_transcript = ""  # NOTE(review): never written in this file
        self.last_partial = ""

        # Auto-final detection variables
        self.interim_count = 0
        self.last_interim_time = None
        self.silence_timeout = 1.5  # seconds of silence after the last interim that triggers a final
        self.min_interim_count = 1  # Minimum interim results before considering final
        self.auto_final_task = None
        self.accumulated_transcript = ""
        self.final_sent = False

    async def start_processing(self, start_message):
        """Initialize with start message from jambonz"""
        self.config = {
            "language": start_message.get("language", "ar-EG"),
            "format": start_message.get("format", "raw"),
            "encoding": start_message.get("encoding", "LINEAR16"),
            "sample_rate": start_message.get("sampleRateHz", 8000),
            "interim_results": start_message.get("interimResults", True),
            "options": start_message.get("options", {})
        }

        logger.info(f"STT session started with config: {self.config}")

        # Initialize audio buffer
        self.audio_buffer = JambonzAudioBuffer(
            sample_rate=self.config["sample_rate"],
            chunk_duration=1.0  # Process every 1 second
        )

        # Reset auto-final detection variables
        self.interim_count = 0
        self.last_interim_time = None
        self.accumulated_transcript = ""
        self.final_sent = False

        # Start background transcription task
        self.transcription_task = asyncio.create_task(self._process_audio_chunks())

        # Start auto-final detection task
        self.auto_final_task = asyncio.create_task(self._monitor_for_auto_final())

    async def stop_processing(self):
        """Stop processing and send final transcription"""
        self.running = False

        # Cancel background tasks
        if self.transcription_task:
            self.transcription_task.cancel()
            try:
                await self.transcription_task
            except asyncio.CancelledError:
                pass

        if self.auto_final_task:
            self.auto_final_task.cancel()
            try:
                await self.auto_final_task
            except asyncio.CancelledError:
                pass

        # Send final transcription if not already sent
        if not self.final_sent and self.accumulated_transcript.strip():
            await self.send_transcription(self.accumulated_transcript, is_final=True)

        # Also process any remaining audio for comprehensive final transcription
        # (skipped when the send above already set final_sent)
        if self.audio_buffer:
            all_audio = self.audio_buffer.get_all_audio()
            if len(all_audio) > 0 and not self.final_sent:
                loop = asyncio.get_event_loop()
                final_transcription = await loop.run_in_executor(
                    executor,
                    transcribe_chunk_direct,
                    all_audio,
                    self.config["sample_rate"]
                )

                if final_transcription.strip():
                    # Send comprehensive final transcription
                    await self.send_transcription(final_transcription, is_final=True)

        logger.info("STT session ended")

    async def add_audio_data(self, audio_bytes):
        """Add audio data to buffer"""
        if self.audio_buffer:
            audio_data = linear16_to_audio(audio_bytes, self.config["sample_rate"])
            self.audio_buffer.add_audio(audio_data)

    async def _process_audio_chunks(self):
        """Process audio chunks for interim results"""
        # Exits immediately when interim_results is False in the start config;
        # otherwise polls the buffer roughly every 100 ms.
        while self.running and self.config.get("interim_results", False):
            try:
                if self.audio_buffer and self.audio_buffer.has_chunk_ready():
                    chunk_signal = self.audio_buffer.get_chunk_for_processing()
                    if chunk_signal is not None:
                        # Get all accumulated audio so far for complete transcription
                        all_audio = self.audio_buffer.get_all_audio()

                        # Only process if the most recent chunk contains speech
                        if len(all_audio) > 0 and self.audio_buffer.is_speech(all_audio[-self.audio_buffer.chunk_samples:]):
                            # Run transcription on all accumulated audio
                            loop = asyncio.get_event_loop()
                            transcription = await loop.run_in_executor(
                                executor,
                                transcribe_chunk_direct,
                                all_audio,
                                self.config["sample_rate"]
                            )

                            if transcription.strip() and transcription != self.last_partial:
                                self.last_partial = transcription
                                self.accumulated_transcript = transcription  # Update accumulated transcript
                                self.interim_count += 1
                                self.last_interim_time = time.time()

                                # Send interim result. NOTE: send_transcription
                                # currently drops non-final messages, but the
                                # bookkeeping above still drives the auto-final
                                # watchdog.
                                await self.send_transcription(transcription, is_final=False)

                                logger.info(f"Interim #{self.interim_count}: '{transcription}'")

                # Small delay to prevent excessive processing
                await asyncio.sleep(0.1)

            except Exception as e:
                logger.error(f"Error in chunk processing: {e}")
                await asyncio.sleep(1)

    async def _monitor_for_auto_final(self):
        """Monitor for auto-final conditions: silence after enough interim results"""
        while self.running:
            try:
                current_time = time.time()

                # Check if we should send auto-final transcription
                if (self.interim_count >= self.min_interim_count and
                    self.last_interim_time is not None and
                    (current_time - self.last_interim_time) >= self.silence_timeout and
                    not self.final_sent and
                    self.accumulated_transcript.strip()):

                    logger.info(f"Auto-final triggered: {self.interim_count} interim results, "
                              f"{current_time - self.last_interim_time:.1f}s silence")

                    # Send the accumulated transcript as final
                    await self.send_transcription(self.accumulated_transcript, is_final=True)
                    self.final_sent = True

                    # Reset counters for potential next utterance
                    # NOTE(review): final_sent is NOT reset here, so a second
                    # utterance on the same connection can never auto-finalize
                    # — confirm this is intended.
                    self.interim_count = 0
                    self.last_interim_time = None
                    self.accumulated_transcript = ""

                # Check every 0.5 seconds
                await asyncio.sleep(0.5)

            except Exception as e:
                logger.error(f"Error in auto-final monitoring: {e}")
                await asyncio.sleep(1)

    async def send_transcription(self, text, is_final=False, confidence=0.9):
        """Send transcription in jambonz format with Arabic number conversion, only for final results"""
        try:
            if not is_final:
                # Do nothing for interim results
                logger.debug("Skipping interim transcription (not final).")
                return

            # Convert Arabic numbers only for final transcripts
            original_text = text
            converted_text = convert_arabic_numbers_in_sentence(text)

            # Log the conversion if numbers were found and converted
            if original_text != converted_text:
                logger.info(f"Arabic numbers converted: '{original_text}' -> '{converted_text}'")

            message = {
                "type": "transcription",
                "is_final": True,
                "alternatives": [
                    {
                        # NOTE(review): the RAW text is sent while the log line
                        # below reports the digit-converted text — confirm
                        # which one downstream expects.
                        "transcript": original_text,#converted_text,
                        "confidence": confidence
                    }
                ],
                "language": self.config.get("language", "ar-EG"),
                "channel": 1
            }

            # Send only final messages
            await self.websocket.send(json.dumps(message))
            logger.info(f"Sent FINAL transcription: '{converted_text}'")

            self.final_sent = True

        except Exception as e:
            logger.error(f"Error sending transcription: {e}")

    async def send_error(self, error_message):
        """Send error message in jambonz format"""
        try:
            message = {
                "type": "error",
                "error": error_message
            }
            await self.websocket.send(json.dumps(message))
            logger.error(f"Sent error: {error_message}")
        except Exception as e:
            logger.error(f"Error sending error message: {e}")
+
async def handle_jambonz_websocket(websocket):
    """Handle one jambonz STT WebSocket connection.

    Protocol: JSON text frames carry control messages ("start" / "stop");
    binary frames carry LINEAR16 PCM audio. A JambonzSTTHandler accumulates
    the audio and streams transcription results back on the same socket.
    Errors are reported to the client where possible and never crash the
    server loop.
    """
    client_id = f"jambonz_{id(websocket)}"
    logger.info(f"New jambonz connection: {client_id}")

    handler = JambonzSTTHandler(websocket)

    try:
        async for message in websocket:
            try:
                if isinstance(message, str):
                    # Handle JSON control messages
                    data = json.loads(message)
                    message_type = data.get("type")

                    if message_type == "start":
                        logger.info(f"Received start message: {data}")
                        await handler.start_processing(data)

                    elif message_type == "stop":
                        logger.info("Received stop message")
                        await handler.stop_processing()
                        # Close websocket after final transcription
                        await websocket.close(code=1000, reason="Session completed")
                        break

                    else:
                        logger.warning(f"Unknown message type: {message_type}")
                        await handler.send_error(f"Unknown message type: {message_type}")

                else:
                    # Handle binary audio data (LINEAR16 PCM)
                    if handler.audio_buffer is None:
                        await handler.send_error("Received audio before start message")
                        continue

                    await handler.add_audio_data(message)

            except json.JSONDecodeError as e:
                logger.error(f"JSON decode error: {e}")
                await handler.send_error(f"Invalid JSON: {str(e)}")
            except Exception as e:
                logger.error(f"Error processing message: {e}")
                await handler.send_error(f"Processing error: {str(e)}")

    except websockets.exceptions.ConnectionClosed:
        logger.info(f"jambonz connection closed: {client_id}")
    except Exception as e:
        logger.error(f"jambonz WebSocket error: {e}")
        try:
            await handler.send_error(str(e))
        except Exception:
            # Narrowed from a bare `except:` — the socket may already be
            # closed, but KeyboardInterrupt/SystemExit must still propagate.
            pass
    finally:
        # Ensure the session is torn down even on abnormal disconnects.
        if handler.running:
            await handler.stop_processing()
        logger.info(f"jambonz connection ended: {client_id}")
+
async def main():
    """Run the jambonz custom-STT WebSocket endpoint until it shuts down."""
    logger.info("Starting Jambonz Custom STT WebSocket server on port 3006...")

    stt_server = await websockets.serve(
        handle_jambonz_websocket,
        "0.0.0.0",
        3006,
        ping_interval=20,
        ping_timeout=10,
        close_timeout=10
    )

    # Startup summary for operators.
    for line in (
        "Jambonz Custom STT WebSocket server started on ws://0.0.0.0:3006",
        "Ready to handle jambonz STT requests",
        "- Expects LINEAR16 PCM audio at 8kHz",
        "- Supports interim results with auto-final detection",
        "- Auto-final: 3+ interim results + 1.3s silence",
        "- Resamples to 16kHz for Whisper processing",
        "- Converts Arabic numbers to digits before sending",
    ):
        logger.info(line)

    # Block until the server is closed.
    await stt_server.wait_closed()
+
+if __name__ == "__main__":
+ print("=" * 60)
+ print("Jambonz Custom STT Server with Whisper + Arabic Numbers")
+ print("=" * 60)
+ print(f"Model: {MODEL_NAME}")
+ print(f"Device: {device}")
+ print("WebSocket Port: 3006")
+ print("Protocol: jambonz STT API")
+ print("Audio Format: LINEAR16 PCM @ 8kHz")
+ print("Auto-Final: 2+ speech activities + 1.3s silence")
+ print("Arabic Numbers: Converted to digits in FINAL transcriptions only")
+ print("Interim Results: DISABLED (final transcription only)")
+ if arabic_numbers_available:
+ print("✓ pyarabic library available for number conversion")
+ else:
+ print("✗ pyarabic library not available - install with: pip install pyarabic")
+ print("=" * 60)
+
+ try:
+ asyncio.run(main())
+ except KeyboardInterrupt:
+ print("\nShutting down server...")
+ except Exception as e:
+ print(f"Server error: {e}")
diff --git a/stt_ar_fastconformer_hybrid_large_pcd_v1.0.nemo b/stt_ar_fastconformer_hybrid_large_pcd_v1.0.nemo
new file mode 100644
index 0000000000000000000000000000000000000000..2eb7700ee82d4c762a28b1af438bec82429186e0
--- /dev/null
+++ b/stt_ar_fastconformer_hybrid_large_pcd_v1.0.nemo
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d29d19d7c054a5fc010ac6815e9cbb0dd1b21a30e0a7f7f2982e1fecaf0c3e31
+size 459233280
diff --git a/w_nemo.py b/w_nemo.py
new file mode 100644
index 0000000000000000000000000000000000000000..0fd52f2da1e89e00a6742f7980f2e4aeffca90a7
--- /dev/null
+++ b/w_nemo.py
@@ -0,0 +1,1033 @@
+import torch
+import asyncio
+import websockets
+import json
+import threading
+import numpy as np
+import logging
+import time
+import tempfile
+import os
+import re
+from concurrent.futures import ThreadPoolExecutor
+import subprocess
+import struct
+
+# NeMo imports
+import nemo.collections.asr as nemo_asr
+import soundfile as sf
+
+# Whisper imports
+# from transformers import AutoModelForSpeechSeq2Seq, AutoProcessor, WhisperTokenizer, pipeline
+from transformers import AutoModelForSpeechSeq2Seq, AutoProcessor
+
+
+# Arabic number conversion imports for Whisper
# Optional dependency: pyarabic supplies text2number, used to turn spoken
# Arabic number words into integers for Whisper transcripts only.
try:
    from pyarabic.number import text2number
except ImportError:
    arabic_numbers_available = False
    print("✗ pyarabic not available - install with: pip install pyarabic")
    print("Arabic numbers will not be converted to digits for Whisper")
else:
    arabic_numbers_available = True
    print("✓ pyarabic library available for Whisper number conversion")

# Set up logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
+
# ===== NeMo Arabic number mapping =====
# Maps spoken Arabic number words (MSA and Egyptian-dialect spellings, plus
# common ASR misrecognitions such as "سفر"/"فيرو" for zero) to digit strings.
# Consumed by replace_arabic_numbers_nemo() on NeMo transcripts.
# NOTE(review): the keys "زيره " and "زرو " carry a trailing space, so they
# only match when followed by whitespace — confirm that is intended.
arabic_numbers_nemo = {
    # Basic digits (BUGFIX: removed a literal duplicate "زير": "0" entry)
    "سفر": "0", "فيرو": "0", "هيرو": "0","صفر": "0", "زيرو": "0", "٠": "0","زيو": "0","زير": "0","زر": "0","زروا": "0","زرا": "0","زيره ": "0","زرو ": "0",
    "واحد": "1", "واحدة": "1", "١": "1",
    "اتنين": "2", "اثنين": "2", "إثنين": "2", "اثنان": "2", "إثنان": "2", "٢": "2",
    "تلاتة": "3", "ثلاثة": "3", "٣": "3","تلاته": "3","ثلاثه": "3","ثلاثا": "3","تلاتا": "3",
    "اربعة": "4", "أربعة": "4", "٤": "4","اربعه": "4","أربعه": "4","أربع": "4","اربع": "4","اربعا": "4","أربعا": "4",
    "خمسة": "5", "خمسه": "5", "٥": "5", "خمس": "5", "خمسا": "5",
    "ستة": "6", "سته": "6", "٦": "6", "ست": "6", "ستّا": "6", "ستةً": "6",
    "سبعة": "7", "سبعه": "7", "٧": "7", "سبع": "7", "سبعا": "7",
    "ثمانية": "8", "ثمانيه": "8", "٨": "8", "ثمان": "8", "ثمنية": "8", "ثمنيه": "8", "ثمانيا": "8", "ثمن": "8",
    "تسعة": "9", "تسعه": "9", "٩": "9", "تسع": "9", "تسعا": "9",

    # Teens
    "عشرة": "10", "١٠": "10",
    "حداشر": "11", "احد عشر": "11","احداشر": "11",
    "اتناشر": "12", "اثنا عشر": "12",
    "تلتاشر": "13", "ثلاثة عشر": "13",
    "اربعتاشر": "14", "أربعة عشر": "14",
    "خمستاشر": "15", "خمسة عشر": "15",
    "ستاشر": "16", "ستة عشر": "16",
    "سبعتاشر": "17", "سبعة عشر": "17",
    "طمنتاشر": "18", "ثمانية عشر": "18",
    "تسعتاشر": "19", "تسعة عشر": "19",

    # Tens
    "عشرين": "20", "٢٠": "20",
    "تلاتين": "30", "ثلاثين": "30", "٣٠": "30",
    "اربعين": "40", "أربعين": "40", "٤٠": "40",
    "خمسين": "50", "٥٠": "50",
    "ستين": "60", "٦٠": "60",
    "سبعين": "70", "٧٠": "70",
    "تمانين": "80", "ثمانين": "80", "٨٠": "80","تمانون": "80","ثمانون": "80",
    "تسعين": "90", "٩٠": "90",

    # Hundreds
    "مية": "100", "مائة": "100", "مئة": "100", "١٠٠": "100",
    "ميتين": "200", "مائتين": "200",
    "تلاتمية": "300", "ثلاثمائة": "300",
    "اربعمية": "400", "أربعمائة": "400",
    "خمسمية": "500", "خمسمائة": "500",
    "ستمية": "600", "ستمائة": "600",
    "سبعمية": "700", "سبعمائة": "700",
    "تمانمية": "800", "ثمانمائة": "800",
    "تسعمية": "900", "تسعمائة": "900",

    # Thousands
    "ألف": "1000", "الف": "1000", "١٠٠٠": "1000",
    "ألفين": "2000", "الفين": "2000",
    "تلات تلاف": "3000", "ثلاثة آلاف": "3000",
    "اربعة آلاف": "4000", "أربعة آلاف": "4000",
    "خمسة آلاف": "5000",
    "ستة آلاف": "6000",
    "سبعة آلاف": "7000",
    "تمانية آلاف": "8000", "ثمانية آلاف": "8000",
    "تسعة آلاف": "9000",

    # Large numbers
    "عشرة آلاف": "10000",
    "مية ألف": "100000", "مائة ألف": "100000",
    "مليون": "1000000", "١٠٠٠٠٠٠": "1000000",
    "ملايين": "1000000",
    "مليار": "1000000000", "١٠٠٠٠٠٠٠٠٠": "1000000000"
}
+
def replace_arabic_numbers_nemo(text: str) -> str:
    """Convert Arabic number words to digit strings for NeMo transcripts.

    Keys are substituted longest-first so that multi-word entries such as
    "خمسة عشر" (15) are matched before their single-word components
    ("خمسة" → 5) can break the phrase apart. Keys are escaped so they are
    always treated as literal text, never as regex metacharacters.

    Args:
        text: raw NeMo transcription.

    Returns:
        The text with every mapped number word replaced by its digits.
    """
    # BUGFIX: the original iterated in dict order, so "خمسة" was replaced
    # before "خمسة عشر" could ever match; longest-first ordering fixes that.
    for word in sorted(arabic_numbers_nemo, key=len, reverse=True):
        text = re.sub(rf"\b{re.escape(word)}\b", arabic_numbers_nemo[word], text)
    return text
+
def convert_arabic_numbers_whisper(sentence: str) -> str:
    """
    Replace Arabic number words in a sentence with digits for Whisper,
    preserving all other words and punctuation.
    """
    # Nothing to do without pyarabic or with a blank sentence.
    if not arabic_numbers_available or not sentence.strip():
        return sentence

    try:
        # Canonicalize common spelling variants before number parsing.
        normalization = (
            ("اربعة", "أربعة"), ("اربع", "أربع"), ("اثنين", "اثنان"),
            ("اتنين", "اثنان"), ("ثلاث", "ثلاثة"), ("خمس", "خمسة"),
            ("ست", "ستة"), ("سبع", "سبعة"), ("ثمان", "ثمانية"),
            ("تسع", "تسعة"), ("عشر", "عشرة"),
        )
        for variant, canonical in normalization:
            sentence = re.sub(rf"\b{variant}\b", canonical, sentence)

        # Tokenize while keeping whitespace runs so they can be re-joined.
        pieces = re.split(r'(\s+)', sentence)
        output = []

        for piece in pieces:
            token = piece.strip()
            if not token:  # whitespace run — keep verbatim
                output.append(piece)
                continue

            replacement = piece
            try:
                value = text2number(token)
                # text2number yields 0 for non-numbers; trust 0 only for "صفر".
                if isinstance(value, int) and (value != 0 or token == "صفر"):
                    replacement = str(value)
            except Exception:
                pass
            output.append(replacement)

        return ''.join(output)

    except Exception as e:
        logger.warning(f"Error converting Arabic numbers: {e}")
        return sentence
+
# Global model/runtime state; initialize_models() fills these in at import
# time. Any of the model handles may stay None if loading fails.
asr_model_nemo = whisper_model = None
whisper_processor = whisper_tokenizer = None
device = torch_dtype = None
+
def initialize_models():
    """Load the NeMo CTC model and the fine-tuned Whisper model into globals.

    Populates ``asr_model_nemo``, ``whisper_model``, ``whisper_processor``,
    ``whisper_tokenizer``, ``device`` and ``torch_dtype``. Either model may
    end up as None when loading fails; callers must check before use.
    """
    global asr_model_nemo, whisper_model, whisper_processor, whisper_tokenizer, device, torch_dtype

    # Pick GPU + fp16 when available, otherwise CPU + fp32.
    device = "cuda" if torch.cuda.is_available() else "cpu"
    torch_dtype = torch.float16 if torch.cuda.is_available() else torch.float32

    logger.info(f"Using device: {device}")
    logger.info(f"CUDA available: {torch.cuda.is_available()}")

    # --- NeMo FastConformer (Arabic) ---
    logger.info("Loading NeMo FastConformer Arabic ASR model...")
    model_path = "stt_ar_fastconformer_hybrid_large_pcd_v1.0.nemo"

    if os.path.exists(model_path):
        try:
            asr_model_nemo = nemo_asr.models.EncDecCTCModel.restore_from(model_path)
            asr_model_nemo.eval()  # inference mode
            logger.info("✓ NeMo FastConformer model loaded successfully")
        except Exception as e:
            logger.error(f"Failed to load NeMo model: {e}")
            asr_model_nemo = None
    else:
        logger.warning(f"NeMo model not found at: {model_path}")
        asr_model_nemo = None

    # --- Whisper (fine-tuned Arabic) ---
    logger.info("Loading Whisper large-v3 model...")
    MODEL_NAME = "alaatiger989/FT_Arabic_Whisper_V1_1"

    try:
        # Prefer flash attention when the package is installed; fall back to
        # the default attention implementation on any failure.
        try:
            import flash_attn  # noqa: F401  (presence check only)
            whisper_model = AutoModelForSpeechSeq2Seq.from_pretrained(
                MODEL_NAME,
                torch_dtype=torch_dtype,
                low_cpu_mem_usage=True,
                use_safetensors=True,
                attn_implementation="flash_attention_2"
            )
            logger.info("✓ Whisper loaded with flash attention")
        except Exception:
            # BUGFIX: was a bare `except:` which also swallowed
            # SystemExit/KeyboardInterrupt.
            whisper_model = AutoModelForSpeechSeq2Seq.from_pretrained(
                MODEL_NAME,
                torch_dtype=torch_dtype,
                low_cpu_mem_usage=True,
                use_safetensors=True
            )
            logger.info("✓ Whisper loaded with standard attention")

        whisper_model.to(device)
        whisper_processor = AutoProcessor.from_pretrained(MODEL_NAME)

        # Reuse the processor's tokenizer instead of loading a second copy.
        whisper_tokenizer = whisper_processor.tokenizer

        logger.info("✓ Whisper model + tokenizer loaded successfully")

    except Exception as e:
        logger.error(f"Failed to load Whisper model: {e}")
        whisper_model = None
+
# Load both ASR models eagerly at import time so the first websocket
# request does not pay the model-loading cost.
initialize_models()

# Shared worker pool used to run blocking transcription calls off the
# asyncio event loop (see run_in_executor call sites below).
executor = ThreadPoolExecutor(max_workers=4)
+
+
+
class JambonzAudioBuffer:
    """Thread-safe accumulator for incoming PCM audio with a simple
    energy-based voice-activity check.

    Keeps two parallel float32 arrays: ``buffer`` (used only to decide when
    a chunk's worth of audio has arrived) and ``total_audio`` (every sample
    received, re-transcribed as a whole by the processing loop).
    """

    def __init__(self, sample_rate=8000, chunk_duration=1.0):
        self.sample_rate = sample_rate
        self.chunk_duration = chunk_duration
        self.chunk_samples = int(chunk_duration * sample_rate)

        self.buffer = np.array([], dtype=np.float32)
        self.lock = threading.Lock()
        self.total_audio = np.array([], dtype=np.float32)

        # Voice Activity Detection - ADJUSTED FOR WHISPER
        self.silence_threshold = 0.01  # Lower threshold for Whisper
        self.min_speech_samples = int(0.3 * sample_rate)  # 300ms minimum speech

    def add_audio(self, audio_data):
        """Append new samples to both the rolling and the total buffers."""
        with self.lock:
            self.buffer = np.concatenate([self.buffer, audio_data])
            self.total_audio = np.concatenate([self.total_audio, audio_data])

            logger.debug(f"Added {len(audio_data)} audio samples, total: {len(self.total_audio)}")

    def has_chunk_ready(self):
        """Return True once at least ``chunk_duration`` seconds are buffered."""
        with self.lock:
            if len(self.buffer) >= self.chunk_samples:
                logger.debug(f"Chunk ready: {len(self.buffer)} >= {self.chunk_samples}")
                return True
            return False

    def is_speech(self, audio_chunk):
        """Energy-based VAD: RMS or peak amplitude above threshold counts."""
        if len(audio_chunk) < self.min_speech_samples:
            logger.debug(f"Audio too short for VAD: {len(audio_chunk)} < {self.min_speech_samples}")
            return False

        rms_energy = np.sqrt(np.mean(audio_chunk ** 2))
        peak_amplitude = np.max(np.abs(audio_chunk))
        is_speech = rms_energy > self.silence_threshold or peak_amplitude > (self.silence_threshold * 2)

        logger.debug(f"VAD check - RMS: {rms_energy:.4f}, Peak: {peak_amplitude:.4f}, "
                     f"Threshold: {self.silence_threshold}, Speech: {is_speech}")
        return is_speech

    def get_chunk_for_processing(self):
        """Return a non-None sentinel when enough audio is buffered.

        Does NOT consume the buffer — the caller re-transcribes the whole
        accumulated audio each time.
        """
        with self.lock:
            if len(self.buffer) < self.chunk_samples:
                return None
            logger.debug(f"Returning processing signal, buffer size: {len(self.buffer)}")
            return np.array([1])  # Signal that chunk is ready

    def get_all_audio(self):
        """Return a copy of every sample received so far."""
        with self.lock:
            snapshot = self.total_audio.copy()
            logger.debug(f"Returning {len(snapshot)} total audio samples")
            return snapshot

    def clear(self):
        """Drop all buffered audio."""
        with self.lock:
            self.buffer = np.array([], dtype=np.float32)
            self.total_audio = np.array([], dtype=np.float32)
            logger.debug("Audio buffer cleared")

    def reset_for_new_segment(self):
        """Drop all buffered audio when a new transcription segment starts."""
        with self.lock:
            self.buffer = np.array([], dtype=np.float32)
            self.total_audio = np.array([], dtype=np.float32)
            logger.debug("Audio buffer reset for new segment")
+
def linear16_to_audio(audio_bytes, sample_rate=8000):
    """Decode LINEAR16 (little-endian int16) PCM bytes into float32 [-1, 1].

    ``sample_rate`` is unused by the decoding itself and is kept only for
    interface compatibility with callers.
    """
    try:
        samples = np.frombuffer(audio_bytes, dtype=np.int16).astype(np.float32)
        return samples / 32768.0
    except Exception as e:
        logger.error(f"Error converting LINEAR16 to audio: {e}")
        return np.array([], dtype=np.float32)
+
+from scipy.signal import resample_poly
+
+# def resample_audio(audio_data, source_rate, target_rate):
+# """High-quality resampling using polyphase resampler."""
+# if source_rate == target_rate:
+# return audio_data.astype(np.float32)
+# # convert float32 [-1..1] to float32 still, but resample
+# gcd = np.gcd(source_rate, target_rate)
+# up = target_rate // gcd
+# down = source_rate // gcd
+# # resample_poly expects 1D numpy array
+# try:
+# resampled = resample_poly(audio_data, up, down).astype(np.float32)
+# return resampled
+# except Exception as e:
+# logger.warning(f"resample_audio fallback: {e}")
+# # last-resort simple repeat (keep previous behavior) but warn
+# if source_rate == 8000 and target_rate == 16000:
+# return np.repeat(audio_data, 2).astype(np.float32)
+# return audio_data.astype(np.float32)
+
+import numpy as np
+from scipy.signal import resample_poly, butter, lfilter
+import webrtcvad
+import noisereduce as nr
+
# Shared WebRTC VAD instance, created once at import time. Aggressiveness
# runs 0..3; 2 is moderately aggressive (higher values reject more
# non-speech, useful in noisy environments).
_vad = webrtcvad.Vad(2)
+
def resample_audio(audio_data, source_rate, target_rate=16000,
                   lowcut=80.0, highcut=7600.0,
                   frame_ms=30, required_ratio=0.55):
    """Resample, band-pass filter, denoise, then run WebRTC VAD.

    Every stage is best-effort: any failure leaves the signal from the
    previous stage untouched and moves on.

    Returns:
        processed_audio (np.ndarray float32): cleaned/resampled audio
        is_speech (bool): True when at least ``required_ratio`` of the
            ``frame_ms`` frames are classified as voiced by WebRTC VAD.
    """
    signal = audio_data.astype(np.float32)

    # Stage 1: polyphase resampling with integer up/down factors.
    if source_rate != target_rate:
        factor = np.gcd(source_rate, target_rate)
        try:
            signal = resample_poly(audio_data, target_rate // factor, source_rate // factor).astype(np.float32)
        except Exception:
            # Crude fallback: plain sample repetition.
            signal = np.repeat(audio_data, int(target_rate / source_rate)).astype(np.float32)

    # Stage 2: 4th-order Butterworth band-pass over the speech band.
    try:
        nyquist = 0.5 * target_rate
        coeff_b, coeff_a = butter(4, [lowcut / nyquist, highcut / nyquist], btype='band')
        signal = lfilter(coeff_b, coeff_a, signal).astype(np.float32)
    except Exception:
        pass

    # Stage 3: spectral noise reduction seeded with the first 250 ms.
    try:
        noise_len = int(0.25 * target_rate)
        if len(signal) >= noise_len:
            signal = nr.reduce_noise(y=signal, y_noise=signal[:noise_len], sr=target_rate).astype(np.float32)
    except Exception:
        pass

    # Stage 4: WebRTC VAD over fixed-size frames converted to int16 PCM.
    frame_len = int(target_rate * (frame_ms / 1000.0))
    pcm_frames = []
    start = 0
    while start + frame_len <= len(signal):
        chunk = signal[start:start + frame_len]
        pcm_frames.append((chunk * 32767).astype(np.int16).tobytes())
        start += frame_len

    voiced_count = 0
    for pcm in pcm_frames:
        try:
            if _vad.is_speech(pcm, target_rate):
                voiced_count += 1
        except Exception:
            pass

    is_speech = (voiced_count / max(1, len(pcm_frames))) >= required_ratio
    return signal, is_speech
+
def transcribe_with_nemo(audio_data, source_sample_rate=8000, target_sample_rate=16000):
    """Transcribe audio using the NeMo FastConformer model.

    Args:
        audio_data: 1-D float32 numpy array of PCM samples in [-1, 1].
        source_sample_rate: sample rate of ``audio_data`` (default 8 kHz).
        target_sample_rate: rate the model expects (default 16 kHz).

    Returns:
        Transcription with Arabic number words converted to digits, or ""
        when there is no audio, no model, no detected speech, or an error.
        Always returns a str, never None.
    """
    try:
        if len(audio_data) == 0 or asr_model_nemo is None:
            return ""

        # Resample to 16kHz (NeMo models typically expect 16kHz) + VAD.
        resampled_audio, has_speech = resample_audio(audio_data, source_sample_rate, target_sample_rate)

        if not has_speech:
            # BUGFIX: this path previously fell off the end of the function
            # and returned None, crashing callers that call .strip().
            print("Silence/noise, skipping...")
            return ""

        print("Speech detected, sending to ASR...")
        # Skip very short audio (< 300 ms) — too little context for ASR.
        min_samples = int(0.3 * target_sample_rate)
        if len(resampled_audio) < min_samples:
            return ""

        start_time = time.time()

        # NeMo's transcribe() expects file paths, so write a temp WAV.
        with tempfile.NamedTemporaryFile(delete=False, suffix=".wav") as tmp_file:
            sf.write(tmp_file.name, resampled_audio, target_sample_rate)
            tmp_path = tmp_file.name

        try:
            result = asr_model_nemo.transcribe([tmp_path])

            if result and len(result) > 0:
                # NeMo returns hypothesis objects or plain strings
                # depending on the library version.
                first = result[0]
                if hasattr(first, 'text'):
                    raw_text = first.text
                elif isinstance(first, str):
                    raw_text = first
                else:
                    raw_text = str(first)

                if not isinstance(raw_text, str):
                    raw_text = str(raw_text)

                if raw_text and raw_text.strip():
                    # Convert Arabic number words to digits for NeMo output.
                    cleaned_text = replace_arabic_numbers_nemo(raw_text)
                    end_time = time.time()

                    if cleaned_text.strip():
                        logger.info(f"NeMo transcription: '{cleaned_text}' (processed in {end_time - start_time:.2f}s)")

                    return cleaned_text.strip()
        finally:
            # Always remove the temp file, even when transcription fails.
            if os.path.exists(tmp_path):
                os.remove(tmp_path)

        return ""

    except Exception as e:
        logger.error(f"Error during NeMo transcription: {e}")
        return ""
+
def transcribe_with_whisper(audio_data, source_sample_rate=8000, target_sample_rate=16000):
    """Transcribe an audio chunk by calling the Whisper model directly.

    Args:
        audio_data: 1-D float32 numpy array of PCM samples in [-1, 1].
        source_sample_rate: sample rate of ``audio_data`` (default 8 kHz).
        target_sample_rate: rate Whisper expects (default 16 kHz).

    Returns:
        The decoded transcription, or "" when there is no audio, no model,
        no detected speech, or an error. Always returns a str, never None.
    """
    try:
        if len(audio_data) == 0 or whisper_model is None:
            return ""

        # Resample from 8kHz to 16kHz for Whisper and run VAD.
        resampled_audio, has_speech = resample_audio(audio_data, source_sample_rate, target_sample_rate)

        if not has_speech:
            # BUGFIX: this path previously fell off the end of the function
            # and returned None, crashing callers that call .strip().
            print("Silence/noise, skipping...")
            return ""

        print("Speech detected, sending to ASR...")
        # Whisper needs at least ~100 ms of audio.
        min_samples = int(0.1 * target_sample_rate)
        if len(resampled_audio) < min_samples:
            return ""

        start_time = time.time()

        # Build log-mel features with the model's expected dtype/device.
        input_features = whisper_processor(
            resampled_audio,
            sampling_rate=target_sample_rate,
            return_tensors="pt"
        ).input_features
        input_features = input_features.to(device=device, dtype=torch_dtype)

        # Explicit attention mask avoids generate() warnings.
        attention_mask = torch.ones(
            input_features.shape[:-1],
            dtype=torch.long,
            device=device
        )

        # Greedy decoding; translate task per original configuration.
        with torch.no_grad():
            predicted_ids = whisper_model.generate(
                input_features,
                attention_mask=attention_mask,
                max_new_tokens=128,
                do_sample=False,
                num_beams=1,
                language="english",
                task="translate",
                pad_token_id=whisper_tokenizer.pad_token_id,
                eos_token_id=whisper_tokenizer.eos_token_id
            )

        transcription = whisper_tokenizer.batch_decode(
            predicted_ids,
            skip_special_tokens=True
        )[0].strip()

        end_time = time.time()
        logger.info(f"Whisper transcription completed in {end_time - start_time:.2f}s: '{transcription}'")
        return transcription

    except Exception as e:
        logger.error(f"Error during Whisper transcription: {e}")
        return ""
+
class UnifiedSTTHandler:
    """Per-connection jambonz STT session.

    Routes audio either to NeMo (language "ar-EG") or Whisper
    ("ar-EG-whis"), accumulates interim transcriptions internally, and
    emits only FINAL transcriptions once a silence timeout elapses.

    BUGFIX: the original class defined ``add_audio_data`` twice; the first
    (debug-logging) definition was dead code shadowed by the second and
    has been removed.
    """

    def __init__(self, websocket):
        self.websocket = websocket
        self.audio_buffer = None         # JambonzAudioBuffer, created on "start"
        self.config = {}                 # session config parsed from the start message
        self.running = False
        self.transcription_task = None   # background chunk-processing task
        self.use_nemo = False            # which model serves this session

        # Auto-final detection state.
        self.interim_count = 0           # distinct interim results this segment
        self.last_interim_time = None    # wall-clock time of last interim update
        self.silence_timeout = 2.9       # NOTE: _monitor_for_auto_final uses its own per-model timeout
        self.min_interim_count = 1
        self.auto_final_task = None
        self.accumulated_transcript = ""
        self.final_sent = False
        self.segment_number = 0
        self.last_partial = ""

        # Number of successful transcriptions in this segment.
        self.processing_count = 0

    async def start_processing(self, start_message):
        """Initialize the session from a jambonz "start" message.

        Chooses the model from the language tag, builds the audio buffer,
        and launches the background transcription/auto-final tasks.
        """
        self.config = {
            "language": start_message.get("language", "ar-EG"),
            "format": start_message.get("format", "raw"),
            "encoding": start_message.get("encoding", "LINEAR16"),
            "sample_rate": start_message.get("sampleRateHz", 8000),
            "interim_results": True,  # Always enable for internal processing
            "options": start_message.get("options", {})
        }

        # Route by language tag: "ar-EG" → NeMo, "ar-EG-whis" → Whisper.
        language = self.config["language"]
        if language == "ar-EG":
            logger.info("Selected NeMo FastConformer")
            self.use_nemo = True
            model_name = "NeMo FastConformer"
        elif language == "ar-EG-whis":
            logger.info("Selected Whisper large-v3")
            self.use_nemo = False
            model_name = "Whisper large-v3"
        else:
            # Default to NeMo for any other Arabic variant.
            self.use_nemo = True
            model_name = "NeMo FastConformer (default)"

        logger.info(f"STT session started with {model_name} for language: {language}")
        logger.info(f"Config: {self.config}")

        # Fail fast if the selected model failed to load at startup.
        if self.use_nemo and asr_model_nemo is None:
            await self.send_error("NeMo model not available")
            return
        elif not self.use_nemo and whisper_model is None:
            await self.send_error("Whisper model not available")
            return

        # NeMo re-transcribes every 1 s; Whisper every 2 s for accuracy.
        if self.use_nemo:
            chunk_duration = 1.0
        else:
            chunk_duration = 2.0

        self.audio_buffer = JambonzAudioBuffer(
            sample_rate=self.config["sample_rate"],
            chunk_duration=chunk_duration
        )

        # Lower VAD threshold for Whisper (copes better with quiet input).
        if not self.use_nemo:
            self.audio_buffer.silence_threshold = 0.005

        # Reset per-session state.
        self.running = True
        self.interim_count = 0
        self.last_interim_time = None
        self.accumulated_transcript = ""
        self.final_sent = False
        self.segment_number = 0
        self.processing_count = 0
        self.last_partial = ""

        # Background workers: chunk transcription + silence watchdog.
        self.transcription_task = asyncio.create_task(self._process_audio_chunks())
        self.auto_final_task = asyncio.create_task(self._monitor_for_auto_final())

        logger.info(f"Background tasks started for {model_name}")

    async def stop_processing(self):
        """Tear down the session: cancel tasks, flush a last FINAL, clear buffers."""
        logger.info("Stopping STT session...")
        self.running = False

        # Cancel background tasks and wait for them to unwind.
        for task in [self.transcription_task, self.auto_final_task]:
            if task:
                task.cancel()
                try:
                    await task
                except asyncio.CancelledError:
                    pass

        # Send the accumulated transcript if no final was sent yet.
        if not self.final_sent and self.accumulated_transcript.strip():
            await self.send_transcription(self.accumulated_transcript, is_final=True)

        # Process any remaining audio for a comprehensive final transcription.
        if self.audio_buffer:
            all_audio = self.audio_buffer.get_all_audio()
            if len(all_audio) > 0 and not self.final_sent:
                loop = asyncio.get_event_loop()

                if self.use_nemo:
                    final_transcription = await loop.run_in_executor(
                        executor, transcribe_with_nemo, all_audio, self.config["sample_rate"]
                    )
                else:
                    final_transcription = await loop.run_in_executor(
                        executor, transcribe_with_whisper, all_audio, self.config["sample_rate"]
                    )

                # BUGFIX: the transcribe helpers could return None on their
                # silence path; guard before calling .strip().
                if final_transcription and final_transcription.strip():
                    await self.send_transcription(final_transcription, is_final=True)

        # Clear audio buffer.
        if self.audio_buffer:
            self.audio_buffer.clear()

        logger.info("STT session stopped")

    async def start_new_segment(self):
        """Reset per-segment state and clear the buffer for the next utterance."""
        self.segment_number += 1
        self.interim_count = 0
        self.last_interim_time = None
        self.accumulated_transcript = ""
        self.final_sent = False
        self.last_partial = ""
        self.processing_count = 0

        if self.audio_buffer:
            self.audio_buffer.reset_for_new_segment()

        logger.info(f"Started new transcription segment #{self.segment_number}")

    async def add_audio_data(self, audio_bytes):
        """Decode a LINEAR16 frame and append it to the session buffer."""
        if self.audio_buffer and self.running:
            audio_data = linear16_to_audio(audio_bytes, self.config["sample_rate"])
            self.audio_buffer.add_audio(audio_data)

    async def _process_audio_chunks(self):
        """Background loop: re-transcribe the accumulated audio each chunk.

        Transcription runs in the shared thread pool so the event loop is
        never blocked; the whole accumulated segment is re-transcribed so
        the interim result keeps improving.
        """
        model_name = "NeMo" if self.use_nemo else "Whisper"
        logger.info(f"Starting audio chunk processing for {model_name}")

        chunk_count = 0

        while self.running:
            try:
                if self.audio_buffer and self.audio_buffer.has_chunk_ready():
                    chunk_count += 1
                    logger.info(f"{model_name} - Processing chunk #{chunk_count}")

                    chunk_signal = self.audio_buffer.get_chunk_for_processing()
                    if chunk_signal is not None:
                        all_audio = self.audio_buffer.get_all_audio()

                        logger.info(f"{model_name} - Got {len(all_audio)} samples for processing")

                        if len(all_audio) > 0:
                            # VAD looks only at the newest chunk of audio.
                            latest_chunk_start = max(0, len(all_audio) - self.audio_buffer.chunk_samples)
                            latest_chunk = all_audio[latest_chunk_start:]

                            has_speech = self.audio_buffer.is_speech(latest_chunk)
                            logger.info(f"{model_name} - Speech detected: {has_speech}")

                            if has_speech:
                                logger.info(f"{model_name} - Starting transcription...")

                                loop = asyncio.get_event_loop()
                                start_time = time.time()

                                try:
                                    # Choose transcription method by model selection.
                                    if self.use_nemo:
                                        transcription = await loop.run_in_executor(
                                            executor, transcribe_with_nemo, all_audio, self.config["sample_rate"]
                                        )
                                    else:
                                        transcription = await loop.run_in_executor(
                                            executor, transcribe_with_whisper, all_audio, self.config["sample_rate"]
                                        )

                                    process_time = time.time() - start_time
                                    logger.info(f"{model_name} - Transcription completed in {process_time:.2f}s: '{transcription}'")

                                    if transcription and transcription.strip():
                                        self.processing_count += 1
                                        self.accumulated_transcript = transcription

                                        if transcription != self.last_partial or self.interim_count == 0:
                                            self.last_partial = transcription
                                            self.interim_count += 1
                                            self.last_interim_time = time.time()
                                            logger.info(f"{model_name} - Updated interim_count to {self.interim_count}")
                                        else:
                                            # Unchanged text still counts as speech activity.
                                            self.last_interim_time = time.time()
                                            logger.info(f"{model_name} - Same transcription, updating time only")
                                    else:
                                        logger.info(f"{model_name} - No transcription result")

                                except Exception as e:
                                    logger.error(f"{model_name} - Transcription error: {e}")
                                    import traceback
                                    traceback.print_exc()
                            else:
                                logger.debug(f"{model_name} - No speech in chunk")
                    else:
                        logger.warning(f"{model_name} - Chunk signal was None")
                else:
                    # Log why chunk is not ready.
                    # NOTE(review): reads self.audio_buffer.buffer without its
                    # lock — debug-only, but racy; confirm acceptable.
                    if self.audio_buffer:
                        current_size = len(self.audio_buffer.buffer)
                        required_size = self.audio_buffer.chunk_samples
                        if current_size > 0:
                            logger.debug(f"{model_name} - Buffer: {current_size}/{required_size} samples")

                await asyncio.sleep(0.1)

            except Exception as e:
                logger.error(f"{model_name} - Error in chunk processing: {e}")
                import traceback
                traceback.print_exc()
                await asyncio.sleep(1)

    async def _monitor_for_auto_final(self):
        """Watchdog: emit a FINAL once interim results go quiet.

        Uses a per-model silence timeout (NeMo 2 s, Whisper 3 s) rather
        than ``self.silence_timeout``.
        """
        model_name = "NeMo" if self.use_nemo else "Whisper"
        timeout = 2.0 if self.use_nemo else 3.0  # Longer timeout for Whisper

        logger.info(f"Starting auto-final monitoring for {model_name} (timeout: {timeout}s)")

        while self.running:
            try:
                current_time = time.time()

                if (self.interim_count >= self.min_interim_count and
                    self.last_interim_time is not None and
                    (current_time - self.last_interim_time) >= timeout and
                    not self.final_sent and
                    self.accumulated_transcript.strip()):

                    silence_duration = current_time - self.last_interim_time
                    logger.info(f"Auto-final triggered for segment #{self.segment_number} ({model_name}) - "
                                f"Interim count: {self.interim_count}, Silence: {silence_duration:.1f}s")

                    await self.send_transcription(self.accumulated_transcript, is_final=True)
                    await self.start_new_segment()

                await asyncio.sleep(0.5)  # Check every 500ms

            except Exception as e:
                logger.error(f"Error in auto-final monitoring: {e}")
                await asyncio.sleep(0.5)

    async def send_transcription(self, text, is_final=True, confidence=0.9):
        """Send a transcription to jambonz in its expected JSON format.

        NOTE: interim results are disabled — the message is always marked
        final regardless of ``is_final``; the parameter is kept for
        interface compatibility and to gate number conversion.
        """
        try:
            # Whisper transcripts get pyarabic number-word → digit conversion;
            # NeMo output is already converted inside transcribe_with_nemo().
            if not self.use_nemo and is_final:
                original_text = text
                converted_text = convert_arabic_numbers_whisper(text)

                if original_text != converted_text:
                    logger.info(f"Whisper - Arabic numbers converted: '{original_text}' -> '{converted_text}'")
                text = converted_text

            message = {
                "type": "transcription",
                "is_final": True,  # Always send as final
                "alternatives": [
                    {
                        "transcript": text,
                        "confidence": confidence
                    }
                ],
                "language": self.config.get("language", "ar-EG"),
                "channel": 1
            }

            await self.websocket.send(json.dumps(message))
            self.final_sent = True

            model_name = "NeMo" if self.use_nemo else "Whisper"
            logger.info(f"Sent FINAL transcription ({model_name}): '{text}'")

        except Exception as e:
            logger.error(f"Error sending transcription: {e}")

    async def send_error(self, error_message):
        """Send an error message to the client in jambonz format."""
        try:
            message = {
                "type": "error",
                "error": error_message
            }
            await self.websocket.send(json.dumps(message))
            logger.error(f"Sent error: {error_message}")
        except Exception as e:
            logger.error(f"Error sending error message: {e}")
+
async def handle_jambonz_websocket(websocket):
    """Handle one jambonz WebSocket connection for its full lifetime.

    Text frames carry JSON control messages ("start"/"stop"); binary
    frames carry raw LINEAR16 audio. Errors are reported back to the
    client as {"type": "error", ...} JSON messages.
    """
    client_id = f"jambonz_{id(websocket)}"
    logger.info(f"New unified STT connection: {client_id}")

    handler = UnifiedSTTHandler(websocket)

    try:
        async for message in websocket:
            try:
                if isinstance(message, str):
                    data = json.loads(message)
                    message_type = data.get("type")

                    if message_type == "start":
                        logger.info(f"Received start message: {data}")
                        await handler.start_processing(data)

                    elif message_type == "stop":
                        logger.info("Received stop message - closing WebSocket")
                        await handler.stop_processing()
                        await websocket.close(code=1000, reason="Session stopped by client")
                        break

                    else:
                        logger.warning(f"Unknown message type: {message_type}")
                        await handler.send_error(f"Unknown message type: {message_type}")

                else:
                    # Binary frame: raw audio. Reject audio outside a session.
                    if not handler.running or handler.audio_buffer is None:
                        logger.warning("Received audio data outside of active session")
                        await handler.send_error("Received audio before start message or after stop")
                        continue

                    await handler.add_audio_data(message)

            except json.JSONDecodeError as e:
                logger.error(f"JSON decode error: {e}")
                await handler.send_error(f"Invalid JSON: {str(e)}")
            except Exception as e:
                logger.error(f"Error processing message: {e}")
                await handler.send_error(f"Processing error: {str(e)}")

    except websockets.exceptions.ConnectionClosed:
        logger.info(f"Unified STT connection closed: {client_id}")
    except Exception as e:
        logger.error(f"Unified STT WebSocket error: {e}")
        try:
            await handler.send_error(str(e))
        except Exception:
            # BUGFIX: was a bare `except:`; error reporting is best-effort.
            pass
    finally:
        # Always tear the session down, even on abnormal disconnects.
        if handler.running:
            await handler.stop_processing()
        logger.info(f"Unified STT connection ended: {client_id}")
+
async def main():
    """Boot the unified STT WebSocket server and block until it closes."""
    logger.info("Starting Unified Arabic STT WebSocket server on port 3007...")

    # Report which back-ends actually loaded; refuse to start with none.
    candidates = (
        (asr_model_nemo, "NeMo FastConformer (ar-EG)"),
        (whisper_model, "Whisper large-v3 (ar-EG-whis)"),
    )
    models_available = [label for model, label in candidates if model is not None]

    if not models_available:
        logger.error("No models available! Please check model paths and installations.")
        return

    # Start WebSocket server with keepalive pings.
    server = await websockets.serve(
        handle_jambonz_websocket,
        "0.0.0.0",
        3007,
        ping_interval=20,
        ping_timeout=10,
        close_timeout=10
    )

    for line in (
        "Unified Arabic STT WebSocket server started on ws://0.0.0.0:3007",
        "Ready to handle jambonz STT requests with both models",
        "ROUTING:",
        "- language: 'ar-EG' → NeMo FastConformer (with built-in number conversion)",
        "- language: 'ar-EG-whis' → Whisper large-v3 (with pyarabic number conversion)",
        "FEATURES:",
        "- Continuous transcription with segmentation",
        "- Voice Activity Detection",
        "- Auto-final detection (2s silence timeout)",
        "- Model-specific number conversion",
        f"AVAILABLE MODELS: {', '.join(models_available)}",
    ):
        logger.info(line)

    # Block until the server socket closes.
    await server.wait_closed()
+
if __name__ == "__main__":
    # Compute availability lines first, then print one startup banner.
    nemo_status = "✓ Available" if asr_model_nemo is not None else "✗ Not Available"
    whisper_status = "✓ Available" if whisper_model is not None else "✗ Not Available"
    arabic_numbers_status = "✓ Available" if arabic_numbers_available else "✗ Not Available (install pyarabic)"

    print("\n".join([
        "=" * 80,
        "Unified Arabic STT Server (NeMo + Whisper)",
        "=" * 80,
        "WebSocket Port: 3007",
        "Protocol: jambonz STT API",
        "Audio Format: LINEAR16 PCM @ 8kHz → 16kHz",
        "",
        "LANGUAGE ROUTING:",
        "- 'ar-EG' → NeMo FastConformer",
        "  • Built-in Arabic number word to digit conversion",
        "  • Optimized for Arabic dialects",
        "- 'ar-EG-whis' → Whisper large-v3",
        "  • pyarabic library number conversion (final transcripts only)",
        "  • OpenAI Whisper model",
        "",
        "FEATURES:",
        "- Automatic model selection based on language parameter",
        "- Voice Activity Detection",
        "- Auto-final detection (2 seconds silence)",
        "- Model-specific number conversion strategies",
        "- Continuous transcription with segmentation",
        "",
        "MODEL STATUS:",
        f"- NeMo FastConformer: {nemo_status}",
        f"- Whisper large-v3: {whisper_status}",
        f"- pyarabic (Whisper numbers): {arabic_numbers_status}",
        "=" * 80,
    ]))

    try:
        asyncio.run(main())
    except KeyboardInterrupt:
        print("\nShutting down unified server...")
    except Exception as e:
        print(f"Server error: {e}")
\ No newline at end of file
diff --git a/whisper_checkpoints/models--openai--whisper-large-v2/.no_exist/ae4642769ce2ad8fc292556ccea8e901f1530655/processor_config.json b/whisper_checkpoints/models--openai--whisper-large-v2/.no_exist/ae4642769ce2ad8fc292556ccea8e901f1530655/processor_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/whisper_checkpoints/models--openai--whisper-large-v2/blobs/1ce74630ed587e80f3db2b3d434f7026327f131e b/whisper_checkpoints/models--openai--whisper-large-v2/blobs/1ce74630ed587e80f3db2b3d434f7026327f131e
new file mode 100644
index 0000000000000000000000000000000000000000..1ce74630ed587e80f3db2b3d434f7026327f131e
--- /dev/null
+++ b/whisper_checkpoints/models--openai--whisper-large-v2/blobs/1ce74630ed587e80f3db2b3d434f7026327f131e
@@ -0,0 +1,144 @@
+{
+ "_name_or_path": "openai/whisper-large-v2",
+ "activation_dropout": 0.0,
+ "activation_function": "gelu",
+ "architectures": [
+ "WhisperForConditionalGeneration"
+ ],
+ "attention_dropout": 0.0,
+ "begin_suppress_tokens": [
+ 220,
+ 50257
+ ],
+ "bos_token_id": 50257,
+ "d_model": 1280,
+ "decoder_attention_heads": 20,
+ "decoder_ffn_dim": 5120,
+ "decoder_layerdrop": 0.0,
+ "decoder_layers": 32,
+ "decoder_start_token_id": 50258,
+ "dropout": 0.0,
+ "encoder_attention_heads": 20,
+ "encoder_ffn_dim": 5120,
+ "encoder_layerdrop": 0.0,
+ "encoder_layers": 32,
+ "eos_token_id": 50257,
+ "forced_decoder_ids": [
+ [
+ 1,
+ 50259
+ ],
+ [
+ 2,
+ 50359
+ ],
+ [
+ 3,
+ 50363
+ ]
+ ],
+ "init_std": 0.02,
+ "is_encoder_decoder": true,
+ "max_length": 448,
+ "max_source_positions": 1500,
+ "max_target_positions": 448,
+ "model_type": "whisper",
+ "num_hidden_layers": 32,
+ "num_mel_bins": 80,
+ "pad_token_id": 50257,
+ "scale_embedding": false,
+ "suppress_tokens": [
+ 1,
+ 2,
+ 7,
+ 8,
+ 9,
+ 10,
+ 14,
+ 25,
+ 26,
+ 27,
+ 28,
+ 29,
+ 31,
+ 58,
+ 59,
+ 60,
+ 61,
+ 62,
+ 63,
+ 90,
+ 91,
+ 92,
+ 93,
+ 359,
+ 503,
+ 522,
+ 542,
+ 873,
+ 893,
+ 902,
+ 918,
+ 922,
+ 931,
+ 1350,
+ 1853,
+ 1982,
+ 2460,
+ 2627,
+ 3246,
+ 3253,
+ 3268,
+ 3536,
+ 3846,
+ 3961,
+ 4183,
+ 4667,
+ 6585,
+ 6647,
+ 7273,
+ 9061,
+ 9383,
+ 10428,
+ 10929,
+ 11938,
+ 12033,
+ 12331,
+ 12562,
+ 13793,
+ 14157,
+ 14635,
+ 15265,
+ 15618,
+ 16553,
+ 16604,
+ 18362,
+ 18956,
+ 20075,
+ 21675,
+ 22520,
+ 26130,
+ 26161,
+ 26435,
+ 28279,
+ 29464,
+ 31650,
+ 32302,
+ 32470,
+ 36865,
+ 42863,
+ 47425,
+ 49870,
+ 50254,
+ 50258,
+ 50358,
+ 50359,
+ 50360,
+ 50361,
+ 50362
+ ],
+ "torch_dtype": "float32",
+ "transformers_version": "4.27.0.dev0",
+ "use_cache": true,
+ "vocab_size": 51865
+}
diff --git a/whisper_checkpoints/models--openai--whisper-large-v2/blobs/57a1ba2a82c093cabff2541409ae778c97145378b9ddfa722763cb1cb8f9020b b/whisper_checkpoints/models--openai--whisper-large-v2/blobs/57a1ba2a82c093cabff2541409ae778c97145378b9ddfa722763cb1cb8f9020b
new file mode 100644
index 0000000000000000000000000000000000000000..d1d93108cc2cd5fcfdff16a0f333820013891535
--- /dev/null
+++ b/whisper_checkpoints/models--openai--whisper-large-v2/blobs/57a1ba2a82c093cabff2541409ae778c97145378b9ddfa722763cb1cb8f9020b
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:57a1ba2a82c093cabff2541409ae778c97145378b9ddfa722763cb1cb8f9020b
+size 6173370152
diff --git a/whisper_checkpoints/models--openai--whisper-large-v2/blobs/c2048dfa9fd94a052e62e908d2c4dfb18534b4d2 b/whisper_checkpoints/models--openai--whisper-large-v2/blobs/c2048dfa9fd94a052e62e908d2c4dfb18534b4d2
new file mode 100644
index 0000000000000000000000000000000000000000..c2048dfa9fd94a052e62e908d2c4dfb18534b4d2
--- /dev/null
+++ b/whisper_checkpoints/models--openai--whisper-large-v2/blobs/c2048dfa9fd94a052e62e908d2c4dfb18534b4d2
@@ -0,0 +1,16256 @@
+{
+ "chunk_length": 30,
+ "feature_extractor_type": "WhisperFeatureExtractor",
+ "feature_size": 80,
+ "hop_length": 160,
+ "mel_filters": [
+ [
+ -0.0,
+ 0.02486259490251541,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.001990821911022067,
+ 0.022871771827340126,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.003981643822044134,
+ 0.02088095061480999,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0059724655002355576,
+ 0.018890129402279854,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.007963287644088268,
+ 0.01689930632710457,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.009954108856618404,
+ 0.014908484183251858,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.011944931000471115,
+ 0.012917662039399147,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.013935752213001251,
+ 0.010926840826869011,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.015926575288176537,
+ 0.0089360186830163,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.017917396500706673,
+ 0.006945197004824877,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.01990821771323681,
+ 0.004954374860972166,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.021899040788412094,
+ 0.0029635531827807426,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.02388986200094223,
+ 0.0009727313299663365,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.025880683213472366,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.025835324078798294,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0010180906392633915,
+ 0.023844502866268158,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.003008912317454815,
+ 0.021853681653738022,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.004999734461307526,
+ 0.019862858578562737,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.006990555673837662,
+ 0.0178720373660326,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.008981377817690372,
+ 0.015881216153502464,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.010972199961543083,
+ 0.013890394009649754,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.01296302117407322,
+ 0.011899571865797043,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.01495384331792593,
+ 0.009908749721944332,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.01694466546177864,
+ 0.007917927578091621,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.018935488536953926,
+ 0.005927106365561485,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.020874010398983955,
+ 0.004040425643324852,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.022114217281341553,
+ 0.0033186059445142746,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.02173672430217266,
+ 0.0036109676584601402,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.020497702062129974,
+ 0.004762193653732538,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.018486659973859787,
+ 0.006592618301510811,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.01585603691637516,
+ 0.00896277092397213,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.012738768011331558,
+ 0.011751330457627773,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.009250369854271412,
+ 0.014853144995868206,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.005490840878337622,
+ 0.018177473917603493,
+ 0.0028155462350696325,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0015463664894923568,
+ 0.01632951945066452,
+ 0.007420188747346401,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.011181050911545753,
+ 0.012018864043056965,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.006065350491553545,
+ 0.016561277210712433,
+ 0.004360878840088844,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0010297985281795263,
+ 0.012770536355674267,
+ 0.009707189165055752,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.006986402906477451,
+ 0.01485429983586073,
+ 0.004391219466924667,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.001418047584593296,
+ 0.011486922390758991,
+ 0.010089744813740253,
+ 0.00040022286702878773,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.005411104764789343,
+ 0.014735566452145576,
+ 0.006518189795315266,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.00827841367572546,
+ 0.012277561239898205,
+ 0.00396781275048852,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.002187808509916067,
+ 0.010184479877352715,
+ 0.00998187530785799,
+ 0.0022864851634949446,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.00386943481862545,
+ 0.011274894699454308,
+ 0.008466221392154694,
+ 0.0013397691072896123,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.004820294212549925,
+ 0.011678251437842846,
+ 0.007608682848513126,
+ 0.0010091039584949613,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.005156961735337973,
+ 0.011507894843816757,
+ 0.007301822770386934,
+ 0.0011901655234396458,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.004982104524970055,
+ 0.010863498784601688,
+ 0.007451189681887627,
+ 0.001791381393559277,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.004385921638458967,
+ 0.009832492098212242,
+ 0.007973956875503063,
+ 0.002732589840888977,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0034474546555429697,
+ 0.008491347543895245,
+ 0.008797688409686089,
+ 0.00394382793456316,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0022357646375894547,
+ 0.0069067515432834625,
+ 0.009859241545200348,
+ 0.005364237818866968,
+ 0.0008692338014952838,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0008110002381727099,
+ 0.005136650986969471,
+ 0.00946230161935091,
+ 0.0069410777650773525,
+ 0.0027783995028585196,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.003231203882023692,
+ 0.007237049750983715,
+ 0.00862883497029543,
+ 0.004773912951350212,
+ 0.0009189908159896731,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.001233637798577547,
+ 0.0049433219246566296,
+ 0.008653006516397,
+ 0.006818502210080624,
+ 0.003248583758249879,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0026164355222135782,
+ 0.006051854696124792,
+ 0.008880467154085636,
+ 0.005574479699134827,
+ 0.002268492942675948,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0002863667905330658,
+ 0.003467798000201583,
+ 0.0066492292098701,
+ 0.00787146482616663,
+ 0.004809896927326918,
+ 0.0017483289120718837,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0009245910914614797,
+ 0.0038708120118826628,
+ 0.00681703258305788,
+ 0.007283343467861414,
+ 0.004448124207556248,
+ 0.0016129047144204378,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0011703289346769452,
+ 0.003898728871718049,
+ 0.006627128925174475,
+ 0.0070473202504217625,
+ 0.004421714693307877,
+ 0.0017961094854399562,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0010892992140725255,
+ 0.003615982597693801,
+ 0.006142666097730398,
+ 0.007102936040610075,
+ 0.004671447444707155,
+ 0.002239959081634879,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0007392280967906117,
+ 0.0030791081953793764,
+ 0.005418988410383463,
+ 0.007397185545414686,
+ 0.005145462695509195,
+ 0.002893739379942417,
+ 0.0006420162972062826,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.00017068670422304422,
+ 0.0023375742603093386,
+ 0.004504461772739887,
+ 0.0066713495180010796,
+ 0.005798479542136192,
+ 0.003713231300935149,
+ 0.0016279831761494279,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0014345343224704266,
+ 0.0034412189852446318,
+ 0.005447904113680124,
+ 0.006591092795133591,
+ 0.004660011734813452,
+ 0.002728930441662669,
+ 0.0007978491485118866,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0004075043834745884,
+ 0.002265830524265766,
+ 0.004124156199395657,
+ 0.005982482805848122,
+ 0.005700822453945875,
+ 0.003912510350346565,
+ 0.0021241982467472553,
+ 0.0003358862304594368,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0010099108330905437,
+ 0.002730846870690584,
+ 0.004451782442629337,
+ 0.006172718480229378,
+ 0.005150905344635248,
+ 0.0034948070533573627,
+ 0.0018387088784947991,
+ 0.0001826105872169137,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0012943691108375788,
+ 0.002888072282075882,
+ 0.004481775686144829,
+ 0.006075479090213776,
+ 0.0048866597935557365,
+ 0.003353001084178686,
+ 0.0018193417927250266,
+ 0.00028568264679051936,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0013131388695910573,
+ 0.0027890161145478487,
+ 0.004264893010258675,
+ 0.0057407706044614315,
+ 0.004859979264438152,
+ 0.0034397069830447435,
+ 0.0020194342359900475,
+ 0.0005991620710119605,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0011121684219688177,
+ 0.002478930866345763,
+ 0.0038456933107227087,
+ 0.0052124555222690105,
+ 0.005028639920055866,
+ 0.0037133716978132725,
+ 0.002398103242740035,
+ 0.0010828346712514758,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0007317548734135926,
+ 0.0019974694587290287,
+ 0.003263183869421482,
+ 0.004528898745775223,
+ 0.005355686880648136,
+ 0.004137659445405006,
+ 0.0029196315445005894,
+ 0.0017016039928421378,
+ 0.0004835762665607035,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.00020713974663522094,
+ 0.0013792773243039846,
+ 0.0025514145381748676,
+ 0.003723552217707038,
+ 0.004895689897239208,
+ 0.004680895246565342,
+ 0.0035529187880456448,
+ 0.0024249425623565912,
+ 0.0012969663366675377,
+ 0.00016899015463422984,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0006545265205204487,
+ 0.0017400053329765797,
+ 0.0028254841454327106,
+ 0.003910962492227554,
+ 0.004996441304683685,
+ 0.0042709787376224995,
+ 0.003226396394893527,
+ 0.002181813819333911,
+ 0.0011372314766049385,
+ 9.264905384043232e-05,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.000854626705404371,
+ 0.001859853626228869,
+ 0.002865080488845706,
+ 0.003870307235047221,
+ 0.00487553421407938,
+ 0.00408313749358058,
+ 0.003115783678367734,
+ 0.0021484296303242445,
+ 0.001181075582280755,
+ 0.0002137213887181133,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0008483415003865957,
+ 0.0017792496364563704,
+ 0.0027101580053567886,
+ 0.0036410661414265633,
+ 0.004571974277496338,
+ 0.004079728852957487,
+ 0.003183893393725157,
+ 0.002288057701662183,
+ 0.0013922222424298525,
+ 0.0004963868414051831,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0006716204807162285,
+ 0.0015337044605985284,
+ 0.002395788673311472,
+ 0.0032578727696090937,
+ 0.004119956865906715,
+ 0.004227725323289633,
+ 0.0033981208689510822,
+ 0.0025685166474431753,
+ 0.0017389123095199466,
+ 0.0009093079133890569,
+ 7.970355363795534e-05,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0003559796023182571,
+ 0.0011543278815224767,
+ 0.0019526762189343572,
+ 0.002751024439930916,
+ 0.0035493727773427963,
+ 0.004347721114754677,
+ 0.0037299629766494036,
+ 0.002961693098768592,
+ 0.00219342322088778,
+ 0.0014251532265916467,
+ 0.0006568834069184959,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0006682946113869548,
+ 0.0014076193328946829,
+ 0.0021469437051564455,
+ 0.002886268775910139,
+ 0.0036255933810025454,
+ 0.004154576454311609,
+ 0.0034431067761033773,
+ 0.0027316368650645018,
+ 0.0020201667211949825,
+ 0.0013086966937407851,
+ 0.0005972267827019095,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 9.926508937496692e-05,
+ 0.0007839298341423273,
+ 0.001468594535253942,
+ 0.0021532592363655567,
+ 0.0028379240538924932,
+ 0.0035225888714194298,
+ 0.0039915177039802074,
+ 0.0033326479606330395,
+ 0.002673778682947159,
+ 0.002014909405261278,
+ 0.0013560398947447538,
+ 0.0006971705006435513,
+ 3.8301113818306476e-05,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.00010181095422012731,
+ 0.0007358568836934865,
+ 0.0013699028640985489,
+ 0.0020039486698806286,
+ 0.002637994708493352,
+ 0.0032720407471060753,
+ 0.003906086552888155,
+ 0.0033682563807815313,
+ 0.0027580985333770514,
+ 0.002147940918803215,
+ 0.0015377833042293787,
+ 0.0009276255150325596,
+ 0.000317467754939571,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0005530364578589797,
+ 0.0011402058880776167,
+ 0.0017273754347115755,
+ 0.0023145449813455343,
+ 0.002901714527979493,
+ 0.003488884074613452,
+ 0.003523340215906501,
+ 0.002958292607218027,
+ 0.002393245231360197,
+ 0.0018281979719176888,
+ 0.001263150479644537,
+ 0.0006981031037867069,
+ 0.0001330557424807921,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0002608386566862464,
+ 0.0008045974536798894,
+ 0.0013483562506735325,
+ 0.0018921148730441928,
+ 0.0024358737282454967,
+ 0.002979632467031479,
+ 0.003523391205817461,
+ 0.003251380519941449,
+ 0.0027281083166599274,
+ 0.002204835880547762,
+ 0.001681563793681562,
+ 0.001158291706815362,
+ 0.0006350195035338402,
+ 0.00011174729297636077,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0003849811910185963,
+ 0.0008885387214832008,
+ 0.001392096164636314,
+ 0.0018956535495817661,
+ 0.00239921105094254,
+ 0.002902768552303314,
+ 0.0034063260536640882,
+ 0.003132763085886836,
+ 0.0026481777895241976,
+ 0.0021635922603309155,
+ 0.0016790067311376333,
+ 0.0011944210855290294,
+ 0.0007098356145434082,
+ 0.00022525011445395648,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.000366741674952209,
+ 0.0008330700220540166,
+ 0.0012993983691558242,
+ 0.0017657268326729536,
+ 0.0022320549469441175,
+ 0.002698383294045925,
+ 0.0031647118739783764,
+ 0.003141313325613737,
+ 0.002692554146051407,
+ 0.0022437951993197203,
+ 0.00179503601975739,
+ 0.0013462770730257034,
+ 0.000897518009878695,
+ 0.0004487590049393475,
+ 0.0
+ ]
+ ],
+ "n_fft": 400,
+ "n_samples": 480000,
+ "nb_max_frames": 3000,
+ "padding_side": "right",
+ "padding_value": 0.0,
+ "processor_class": "WhisperProcessor",
+ "return_attention_mask": false,
+ "sampling_rate": 16000
+}
diff --git a/whisper_checkpoints/models--openai--whisper-large-v2/refs/main b/whisper_checkpoints/models--openai--whisper-large-v2/refs/main
new file mode 100644
index 0000000000000000000000000000000000000000..9cdf5916ff66ada36bfffb062ca7aee9a5e3af40
--- /dev/null
+++ b/whisper_checkpoints/models--openai--whisper-large-v2/refs/main
@@ -0,0 +1 @@
+ae4642769ce2ad8fc292556ccea8e901f1530655
\ No newline at end of file
diff --git a/whisper_checkpoints/models--openai--whisper-large-v2/snapshots/ae4642769ce2ad8fc292556ccea8e901f1530655/config.json b/whisper_checkpoints/models--openai--whisper-large-v2/snapshots/ae4642769ce2ad8fc292556ccea8e901f1530655/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..1ce74630ed587e80f3db2b3d434f7026327f131e
--- /dev/null
+++ b/whisper_checkpoints/models--openai--whisper-large-v2/snapshots/ae4642769ce2ad8fc292556ccea8e901f1530655/config.json
@@ -0,0 +1,144 @@
+{
+ "_name_or_path": "openai/whisper-large-v2",
+ "activation_dropout": 0.0,
+ "activation_function": "gelu",
+ "architectures": [
+ "WhisperForConditionalGeneration"
+ ],
+ "attention_dropout": 0.0,
+ "begin_suppress_tokens": [
+ 220,
+ 50257
+ ],
+ "bos_token_id": 50257,
+ "d_model": 1280,
+ "decoder_attention_heads": 20,
+ "decoder_ffn_dim": 5120,
+ "decoder_layerdrop": 0.0,
+ "decoder_layers": 32,
+ "decoder_start_token_id": 50258,
+ "dropout": 0.0,
+ "encoder_attention_heads": 20,
+ "encoder_ffn_dim": 5120,
+ "encoder_layerdrop": 0.0,
+ "encoder_layers": 32,
+ "eos_token_id": 50257,
+ "forced_decoder_ids": [
+ [
+ 1,
+ 50259
+ ],
+ [
+ 2,
+ 50359
+ ],
+ [
+ 3,
+ 50363
+ ]
+ ],
+ "init_std": 0.02,
+ "is_encoder_decoder": true,
+ "max_length": 448,
+ "max_source_positions": 1500,
+ "max_target_positions": 448,
+ "model_type": "whisper",
+ "num_hidden_layers": 32,
+ "num_mel_bins": 80,
+ "pad_token_id": 50257,
+ "scale_embedding": false,
+ "suppress_tokens": [
+ 1,
+ 2,
+ 7,
+ 8,
+ 9,
+ 10,
+ 14,
+ 25,
+ 26,
+ 27,
+ 28,
+ 29,
+ 31,
+ 58,
+ 59,
+ 60,
+ 61,
+ 62,
+ 63,
+ 90,
+ 91,
+ 92,
+ 93,
+ 359,
+ 503,
+ 522,
+ 542,
+ 873,
+ 893,
+ 902,
+ 918,
+ 922,
+ 931,
+ 1350,
+ 1853,
+ 1982,
+ 2460,
+ 2627,
+ 3246,
+ 3253,
+ 3268,
+ 3536,
+ 3846,
+ 3961,
+ 4183,
+ 4667,
+ 6585,
+ 6647,
+ 7273,
+ 9061,
+ 9383,
+ 10428,
+ 10929,
+ 11938,
+ 12033,
+ 12331,
+ 12562,
+ 13793,
+ 14157,
+ 14635,
+ 15265,
+ 15618,
+ 16553,
+ 16604,
+ 18362,
+ 18956,
+ 20075,
+ 21675,
+ 22520,
+ 26130,
+ 26161,
+ 26435,
+ 28279,
+ 29464,
+ 31650,
+ 32302,
+ 32470,
+ 36865,
+ 42863,
+ 47425,
+ 49870,
+ 50254,
+ 50258,
+ 50358,
+ 50359,
+ 50360,
+ 50361,
+ 50362
+ ],
+ "torch_dtype": "float32",
+ "transformers_version": "4.27.0.dev0",
+ "use_cache": true,
+ "vocab_size": 51865
+}
diff --git a/whisper_checkpoints/models--openai--whisper-large-v2/snapshots/ae4642769ce2ad8fc292556ccea8e901f1530655/model.safetensors b/whisper_checkpoints/models--openai--whisper-large-v2/snapshots/ae4642769ce2ad8fc292556ccea8e901f1530655/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..d1d93108cc2cd5fcfdff16a0f333820013891535
--- /dev/null
+++ b/whisper_checkpoints/models--openai--whisper-large-v2/snapshots/ae4642769ce2ad8fc292556ccea8e901f1530655/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:57a1ba2a82c093cabff2541409ae778c97145378b9ddfa722763cb1cb8f9020b
+size 6173370152
diff --git a/whisper_checkpoints/models--openai--whisper-large-v2/snapshots/ae4642769ce2ad8fc292556ccea8e901f1530655/preprocessor_config.json b/whisper_checkpoints/models--openai--whisper-large-v2/snapshots/ae4642769ce2ad8fc292556ccea8e901f1530655/preprocessor_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..c2048dfa9fd94a052e62e908d2c4dfb18534b4d2
--- /dev/null
+++ b/whisper_checkpoints/models--openai--whisper-large-v2/snapshots/ae4642769ce2ad8fc292556ccea8e901f1530655/preprocessor_config.json
@@ -0,0 +1,16256 @@
+{
+ "chunk_length": 30,
+ "feature_extractor_type": "WhisperFeatureExtractor",
+ "feature_size": 80,
+ "hop_length": 160,
+ "mel_filters": [
+ [
+ -0.0,
+ 0.02486259490251541,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.001990821911022067,
+ 0.022871771827340126,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.003981643822044134,
+ 0.02088095061480999,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0059724655002355576,
+ 0.018890129402279854,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.007963287644088268,
+ 0.01689930632710457,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.009954108856618404,
+ 0.014908484183251858,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.011944931000471115,
+ 0.012917662039399147,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.013935752213001251,
+ 0.010926840826869011,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.015926575288176537,
+ 0.0089360186830163,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.017917396500706673,
+ 0.006945197004824877,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.01990821771323681,
+ 0.004954374860972166,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.021899040788412094,
+ 0.0029635531827807426,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.02388986200094223,
+ 0.0009727313299663365,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.025880683213472366,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.025835324078798294,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0010180906392633915,
+ 0.023844502866268158,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.003008912317454815,
+ 0.021853681653738022,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.004999734461307526,
+ 0.019862858578562737,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.006990555673837662,
+ 0.0178720373660326,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.008981377817690372,
+ 0.015881216153502464,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.010972199961543083,
+ 0.013890394009649754,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.01296302117407322,
+ 0.011899571865797043,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.01495384331792593,
+ 0.009908749721944332,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.01694466546177864,
+ 0.007917927578091621,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.018935488536953926,
+ 0.005927106365561485,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.020874010398983955,
+ 0.004040425643324852,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.022114217281341553,
+ 0.0033186059445142746,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.02173672430217266,
+ 0.0036109676584601402,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.020497702062129974,
+ 0.004762193653732538,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.018486659973859787,
+ 0.006592618301510811,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.01585603691637516,
+ 0.00896277092397213,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.012738768011331558,
+ 0.011751330457627773,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.009250369854271412,
+ 0.014853144995868206,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.005490840878337622,
+ 0.018177473917603493,
+ 0.0028155462350696325,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0015463664894923568,
+ 0.01632951945066452,
+ 0.007420188747346401,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.011181050911545753,
+ 0.012018864043056965,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.006065350491553545,
+ 0.016561277210712433,
+ 0.004360878840088844,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0010297985281795263,
+ 0.012770536355674267,
+ 0.009707189165055752,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.006986402906477451,
+ 0.01485429983586073,
+ 0.004391219466924667,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.001418047584593296,
+ 0.011486922390758991,
+ 0.010089744813740253,
+ 0.00040022286702878773,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.005411104764789343,
+ 0.014735566452145576,
+ 0.006518189795315266,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.00827841367572546,
+ 0.012277561239898205,
+ 0.00396781275048852,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.002187808509916067,
+ 0.010184479877352715,
+ 0.00998187530785799,
+ 0.0022864851634949446,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.00386943481862545,
+ 0.011274894699454308,
+ 0.008466221392154694,
+ 0.0013397691072896123,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.004820294212549925,
+ 0.011678251437842846,
+ 0.007608682848513126,
+ 0.0010091039584949613,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.005156961735337973,
+ 0.011507894843816757,
+ 0.007301822770386934,
+ 0.0011901655234396458,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.004982104524970055,
+ 0.010863498784601688,
+ 0.007451189681887627,
+ 0.001791381393559277,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.004385921638458967,
+ 0.009832492098212242,
+ 0.007973956875503063,
+ 0.002732589840888977,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0034474546555429697,
+ 0.008491347543895245,
+ 0.008797688409686089,
+ 0.00394382793456316,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0022357646375894547,
+ 0.0069067515432834625,
+ 0.009859241545200348,
+ 0.005364237818866968,
+ 0.0008692338014952838,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0008110002381727099,
+ 0.005136650986969471,
+ 0.00946230161935091,
+ 0.0069410777650773525,
+ 0.0027783995028585196,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.003231203882023692,
+ 0.007237049750983715,
+ 0.00862883497029543,
+ 0.004773912951350212,
+ 0.0009189908159896731,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.001233637798577547,
+ 0.0049433219246566296,
+ 0.008653006516397,
+ 0.006818502210080624,
+ 0.003248583758249879,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0026164355222135782,
+ 0.006051854696124792,
+ 0.008880467154085636,
+ 0.005574479699134827,
+ 0.002268492942675948,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0002863667905330658,
+ 0.003467798000201583,
+ 0.0066492292098701,
+ 0.00787146482616663,
+ 0.004809896927326918,
+ 0.0017483289120718837,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0009245910914614797,
+ 0.0038708120118826628,
+ 0.00681703258305788,
+ 0.007283343467861414,
+ 0.004448124207556248,
+ 0.0016129047144204378,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0011703289346769452,
+ 0.003898728871718049,
+ 0.006627128925174475,
+ 0.0070473202504217625,
+ 0.004421714693307877,
+ 0.0017961094854399562,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0010892992140725255,
+ 0.003615982597693801,
+ 0.006142666097730398,
+ 0.007102936040610075,
+ 0.004671447444707155,
+ 0.002239959081634879,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0007392280967906117,
+ 0.0030791081953793764,
+ 0.005418988410383463,
+ 0.007397185545414686,
+ 0.005145462695509195,
+ 0.002893739379942417,
+ 0.0006420162972062826,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.00017068670422304422,
+ 0.0023375742603093386,
+ 0.004504461772739887,
+ 0.0066713495180010796,
+ 0.005798479542136192,
+ 0.003713231300935149,
+ 0.0016279831761494279,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0014345343224704266,
+ 0.0034412189852446318,
+ 0.005447904113680124,
+ 0.006591092795133591,
+ 0.004660011734813452,
+ 0.002728930441662669,
+ 0.0007978491485118866,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0004075043834745884,
+ 0.002265830524265766,
+ 0.004124156199395657,
+ 0.005982482805848122,
+ 0.005700822453945875,
+ 0.003912510350346565,
+ 0.0021241982467472553,
+ 0.0003358862304594368,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0010099108330905437,
+ 0.002730846870690584,
+ 0.004451782442629337,
+ 0.006172718480229378,
+ 0.005150905344635248,
+ 0.0034948070533573627,
+ 0.0018387088784947991,
+ 0.0001826105872169137,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0012943691108375788,
+ 0.002888072282075882,
+ 0.004481775686144829,
+ 0.006075479090213776,
+ 0.0048866597935557365,
+ 0.003353001084178686,
+ 0.0018193417927250266,
+ 0.00028568264679051936,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0013131388695910573,
+ 0.0027890161145478487,
+ 0.004264893010258675,
+ 0.0057407706044614315,
+ 0.004859979264438152,
+ 0.0034397069830447435,
+ 0.0020194342359900475,
+ 0.0005991620710119605,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0011121684219688177,
+ 0.002478930866345763,
+ 0.0038456933107227087,
+ 0.0052124555222690105,
+ 0.005028639920055866,
+ 0.0037133716978132725,
+ 0.002398103242740035,
+ 0.0010828346712514758,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0007317548734135926,
+ 0.0019974694587290287,
+ 0.003263183869421482,
+ 0.004528898745775223,
+ 0.005355686880648136,
+ 0.004137659445405006,
+ 0.0029196315445005894,
+ 0.0017016039928421378,
+ 0.0004835762665607035,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.00020713974663522094,
+ 0.0013792773243039846,
+ 0.0025514145381748676,
+ 0.003723552217707038,
+ 0.004895689897239208,
+ 0.004680895246565342,
+ 0.0035529187880456448,
+ 0.0024249425623565912,
+ 0.0012969663366675377,
+ 0.00016899015463422984,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0006545265205204487,
+ 0.0017400053329765797,
+ 0.0028254841454327106,
+ 0.003910962492227554,
+ 0.004996441304683685,
+ 0.0042709787376224995,
+ 0.003226396394893527,
+ 0.002181813819333911,
+ 0.0011372314766049385,
+ 9.264905384043232e-05,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.000854626705404371,
+ 0.001859853626228869,
+ 0.002865080488845706,
+ 0.003870307235047221,
+ 0.00487553421407938,
+ 0.00408313749358058,
+ 0.003115783678367734,
+ 0.0021484296303242445,
+ 0.001181075582280755,
+ 0.0002137213887181133,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0008483415003865957,
+ 0.0017792496364563704,
+ 0.0027101580053567886,
+ 0.0036410661414265633,
+ 0.004571974277496338,
+ 0.004079728852957487,
+ 0.003183893393725157,
+ 0.002288057701662183,
+ 0.0013922222424298525,
+ 0.0004963868414051831,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0006716204807162285,
+ 0.0015337044605985284,
+ 0.002395788673311472,
+ 0.0032578727696090937,
+ 0.004119956865906715,
+ 0.004227725323289633,
+ 0.0033981208689510822,
+ 0.0025685166474431753,
+ 0.0017389123095199466,
+ 0.0009093079133890569,
+ 7.970355363795534e-05,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0003559796023182571,
+ 0.0011543278815224767,
+ 0.0019526762189343572,
+ 0.002751024439930916,
+ 0.0035493727773427963,
+ 0.004347721114754677,
+ 0.0037299629766494036,
+ 0.002961693098768592,
+ 0.00219342322088778,
+ 0.0014251532265916467,
+ 0.0006568834069184959,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0006682946113869548,
+ 0.0014076193328946829,
+ 0.0021469437051564455,
+ 0.002886268775910139,
+ 0.0036255933810025454,
+ 0.004154576454311609,
+ 0.0034431067761033773,
+ 0.0027316368650645018,
+ 0.0020201667211949825,
+ 0.0013086966937407851,
+ 0.0005972267827019095,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 9.926508937496692e-05,
+ 0.0007839298341423273,
+ 0.001468594535253942,
+ 0.0021532592363655567,
+ 0.0028379240538924932,
+ 0.0035225888714194298,
+ 0.0039915177039802074,
+ 0.0033326479606330395,
+ 0.002673778682947159,
+ 0.002014909405261278,
+ 0.0013560398947447538,
+ 0.0006971705006435513,
+ 3.8301113818306476e-05,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.00010181095422012731,
+ 0.0007358568836934865,
+ 0.0013699028640985489,
+ 0.0020039486698806286,
+ 0.002637994708493352,
+ 0.0032720407471060753,
+ 0.003906086552888155,
+ 0.0033682563807815313,
+ 0.0027580985333770514,
+ 0.002147940918803215,
+ 0.0015377833042293787,
+ 0.0009276255150325596,
+ 0.000317467754939571,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0005530364578589797,
+ 0.0011402058880776167,
+ 0.0017273754347115755,
+ 0.0023145449813455343,
+ 0.002901714527979493,
+ 0.003488884074613452,
+ 0.003523340215906501,
+ 0.002958292607218027,
+ 0.002393245231360197,
+ 0.0018281979719176888,
+ 0.001263150479644537,
+ 0.0006981031037867069,
+ 0.0001330557424807921,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0002608386566862464,
+ 0.0008045974536798894,
+ 0.0013483562506735325,
+ 0.0018921148730441928,
+ 0.0024358737282454967,
+ 0.002979632467031479,
+ 0.003523391205817461,
+ 0.003251380519941449,
+ 0.0027281083166599274,
+ 0.002204835880547762,
+ 0.001681563793681562,
+ 0.001158291706815362,
+ 0.0006350195035338402,
+ 0.00011174729297636077,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0003849811910185963,
+ 0.0008885387214832008,
+ 0.001392096164636314,
+ 0.0018956535495817661,
+ 0.00239921105094254,
+ 0.002902768552303314,
+ 0.0034063260536640882,
+ 0.003132763085886836,
+ 0.0026481777895241976,
+ 0.0021635922603309155,
+ 0.0016790067311376333,
+ 0.0011944210855290294,
+ 0.0007098356145434082,
+ 0.00022525011445395648,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0
+ ],
+ [
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.0,
+ 0.000366741674952209,
+ 0.0008330700220540166,
+ 0.0012993983691558242,
+ 0.0017657268326729536,
+ 0.0022320549469441175,
+ 0.002698383294045925,
+ 0.0031647118739783764,
+ 0.003141313325613737,
+ 0.002692554146051407,
+ 0.0022437951993197203,
+ 0.00179503601975739,
+ 0.0013462770730257034,
+ 0.000897518009878695,
+ 0.0004487590049393475,
+ 0.0
+ ]
+ ],
+ "n_fft": 400,
+ "n_samples": 480000,
+ "nb_max_frames": 3000,
+ "padding_side": "right",
+ "padding_value": 0.0,
+ "processor_class": "WhisperProcessor",
+ "return_attention_mask": false,
+ "sampling_rate": 16000
+}