athmontech committed on
Commit
130ce6d
·
1 Parent(s): 7a2aef9

Remove Hausa language support - model discontinued

Browse files
EMERGENCY_FLUTTER_FIX.md ADDED
@@ -0,0 +1,284 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # 🚨 EMERGENCY Flutter Fix - 404 & Microphone Issues
2
+
3
+ ## ⚡ **QUICK FIX 1: API URL (404 Error)**
4
+
5
+ Your Hugging Face Space URL format is incorrect. Try these URLs in order:
6
+
7
+ ### **Option A: Try this URL format:**
8
+ ```dart
9
+ static const String baseUrl = 'https://carsaai-carsa-api.hf.space';
10
+ ```
11
+
12
+ ### **Option B: If Option A fails, try:**
13
+ ```dart
14
+ static const String baseUrl = 'https://carsaai-carsa-api.hf.space:7860';
15
+ ```
16
+
17
+ ### **Option C: If both fail, use direct Space URL:**
18
+ ```dart
19
+ static const String baseUrl = 'https://huggingface.co/spaces/CarsaAI/carsa_api/proxy';
20
+ ```
21
+
22
+ ## ⚡ **QUICK FIX 2: Microphone Crash**
23
+
24
+ ### **Step 1: Update pubspec.yaml**
25
+ ```yaml
26
+ dependencies:
27
+ flutter:
28
+ sdk: flutter
29
+ http: ^1.1.0
30
+ # ADD THESE IMMEDIATELY:
31
+ permission_handler: ^11.0.1
32
+ flutter_sound: ^9.2.13
33
+ path_provider: ^2.1.1
34
+ ```
35
+
36
+ ### **Step 2: Run this command:**
37
+ ```bash
38
+ flutter pub get
39
+ ```
40
+
41
+ ### **Step 3: Update AndroidManifest.xml**
42
+
43
+ File: `android/app/src/main/AndroidManifest.xml`
44
+
45
+ ```xml
46
+ <manifest xmlns:android="http://schemas.android.com/apk/res/android">
47
+ <!-- ADD THESE PERMISSIONS -->
48
+ <uses-permission android:name="android.permission.INTERNET" />
49
+ <uses-permission android:name="android.permission.RECORD_AUDIO" />
50
+ <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
51
+
52
+ <application
53
+ android:label="carsa_ai"
54
+ android:name="${applicationName}"
55
+ android:icon="@mipmap/ic_launcher">
56
+ <!-- Your existing app configuration -->
57
+ </application>
58
+ </manifest>
59
+ ```
60
+
61
+ ### **Step 4: Create Simple Audio Service**
62
+
63
+ Create `lib/services/simple_audio_service.dart`:
64
+
65
+ ```dart
66
+ import 'dart:io';
67
+ import 'package:flutter_sound/flutter_sound.dart';
68
+ import 'package:permission_handler/permission_handler.dart';
69
+ import 'package:path_provider/path_provider.dart';
70
+
71
+ class SimpleAudioService {
72
+ static FlutterSoundRecorder? _recorder;
73
+ static bool _isInitialized = false;
74
+
75
+ static Future<void> init() async {
76
+ if (_isInitialized) return;
77
+
78
+ _recorder = FlutterSoundRecorder();
79
+ await _recorder!.openRecorder();
80
+ _isInitialized = true;
81
+ print('✅ Audio service initialized');
82
+ }
83
+
84
+ static Future<bool> checkPermissions() async {
85
+ var status = await Permission.microphone.status;
86
+ if (!status.isGranted) {
87
+ status = await Permission.microphone.request();
88
+ }
89
+ print('🎤 Microphone permission: ${status.isGranted}');
90
+ return status.isGranted;
91
+ }
92
+
93
+ static Future<String?> startRecording() async {
94
+ try {
95
+ await init();
96
+
97
+ if (!await checkPermissions()) {
98
+ throw Exception('Microphone permission denied');
99
+ }
100
+
101
+ final directory = await getTemporaryDirectory();
102
+ final filePath = '${directory.path}/recording.wav';
103
+
104
+ await _recorder!.startRecorder(
105
+ toFile: filePath,
106
+ codec: Codec.pcm16WAV,
107
+ );
108
+
109
+ print('🔴 Recording started: $filePath');
110
+ return filePath;
111
+ } catch (e) {
112
+ print('❌ Recording error: $e');
113
+ return null;
114
+ }
115
+ }
116
+
117
+ static Future<String?> stopRecording() async {
118
+ try {
119
+ final path = await _recorder!.stopRecorder();
120
+ print('⏹️ Recording stopped: $path');
121
+ return path;
122
+ } catch (e) {
123
+ print('❌ Stop recording error: $e');
124
+ return null;
125
+ }
126
+ }
127
+ }
128
+ ```
129
+
130
+ ### **Step 5: Update Your UI (Quick Version)**
131
+
132
+ ```dart
133
+ import 'package:flutter/material.dart';
134
+ import 'services/api_service.dart';
135
+ import 'services/simple_audio_service.dart';
136
+
137
+ class QuickFixScreen extends StatefulWidget {
138
+ @override
139
+ _QuickFixScreenState createState() => _QuickFixScreenState();
140
+ }
141
+
142
+ class _QuickFixScreenState extends State<QuickFixScreen> {
143
+ bool isRecording = false;
144
+ bool isLoading = false;
145
+ String result = '';
146
+
147
+ @override
148
+ Widget build(BuildContext context) {
149
+ return Scaffold(
150
+ appBar: AppBar(title: Text('Carsa AI - Emergency Fix')),
151
+ body: Padding(
152
+ padding: EdgeInsets.all(20),
153
+ child: Column(
154
+ mainAxisAlignment: MainAxisAlignment.center,
155
+ children: [
156
+ // Test Translation Button
157
+ ElevatedButton(
158
+ onPressed: isLoading ? null : _testTranslation,
159
+ child: Text('Test Translation'),
160
+ ),
161
+
162
+ SizedBox(height: 20),
163
+
164
+ // Recording Button
165
+ GestureDetector(
166
+ onTap: isLoading ? null : (isRecording ? _stopRecording : _startRecording),
167
+ child: Container(
168
+ width: 100,
169
+ height: 100,
170
+ decoration: BoxDecoration(
171
+ color: isRecording ? Colors.red : Colors.blue,
172
+ shape: BoxShape.circle,
173
+ ),
174
+ child: Icon(
175
+ isRecording ? Icons.stop : Icons.mic,
176
+ color: Colors.white,
177
+ size: 50,
178
+ ),
179
+ ),
180
+ ),
181
+
182
+ SizedBox(height: 20),
183
+
184
+ if (isLoading)
185
+ CircularProgressIndicator(),
186
+
187
+ SizedBox(height: 20),
188
+
189
+ Text(
190
+ result,
191
+ style: TextStyle(fontSize: 16),
192
+ textAlign: TextAlign.center,
193
+ ),
194
+ ],
195
+ ),
196
+ ),
197
+ );
198
+ }
199
+
200
+ Future<void> _testTranslation() async {
201
+ setState(() {
202
+ isLoading = true;
203
+ result = 'Testing translation...';
204
+ });
205
+
206
+ try {
207
+ final response = await ApiService.translateText('Hello', 'twi');
208
+ setState(() {
209
+ result = 'Translation SUCCESS: ${response['translated_text']}';
210
+ });
211
+ } catch (e) {
212
+ setState(() {
213
+ result = 'Translation ERROR: $e';
214
+ });
215
+ } finally {
216
+ setState(() => isLoading = false);
217
+ }
218
+ }
219
+
220
+ Future<void> _startRecording() async {
221
+ setState(() {
222
+ isLoading = true;
223
+ result = 'Starting recording...';
224
+ });
225
+
226
+ try {
227
+ final path = await SimpleAudioService.startRecording();
228
+ if (path != null) {
229
+ setState(() {
230
+ isRecording = true;
231
+ result = 'Recording... Tap to stop';
232
+ });
233
+ } else {
234
+ throw Exception('Failed to start recording');
235
+ }
236
+ } catch (e) {
237
+ setState(() {
238
+ result = 'Recording ERROR: $e';
239
+ });
240
+ } finally {
241
+ setState(() => isLoading = false);
242
+ }
243
+ }
244
+
245
+ Future<void> _stopRecording() async {
246
+ setState(() {
247
+ isLoading = true;
248
+ result = 'Stopping recording...';
249
+ });
250
+
251
+ try {
252
+ final path = await SimpleAudioService.stopRecording();
253
+ setState(() => isRecording = false);
254
+
255
+ if (path != null) {
256
+ // Try speech to text
257
+ final response = await ApiService.speechToText(path);
258
+ setState(() {
259
+ result = 'Speech-to-Text SUCCESS: ${response['transcribed_text']}';
260
+ });
261
+ }
262
+ } catch (e) {
263
+ setState(() {
264
+ isRecording = false;
265
+ result = 'Speech-to-Text ERROR: $e';
266
+ });
267
+ } finally {
268
+ setState(() => isLoading = false);
269
+ }
270
+ }
271
+ }
272
+ ```
273
+
274
+ ## 🎯 **IMMEDIATE ACTION:**
275
+
276
+ 1. **Try URL Option A first** in your API service
277
+ 2. **Add the dependencies** to pubspec.yaml
278
+ 3. **Run** `flutter pub get`
279
+ 4. **Add permissions** to AndroidManifest.xml
280
+ 5. **Test the quick fix screen**
281
+
282
+ If Option A doesn't work, try Option B, then Option C.
283
+
284
+ **This should fix both your 404 and microphone crashes immediately!** 🚀
FIND_CORRECT_URL.md ADDED
@@ -0,0 +1,129 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # 🔍 Find Your Correct API URL - 404 Troubleshooting
2
+
3
+ ## 🚨 **Your Space is giving 404 errors - Let's find the right URL!**
4
+
5
+ ### **Step 1: Check Your Space Status**
6
+
7
+ 1. **Go to your Space directly**: https://huggingface.co/spaces/CarsaAI/carsa_api
8
+ 2. **Look for these indicators:**
9
+ - ✅ **"Running"** status (green)
10
+ - ❌ **"Building"** status (yellow)
11
+ - ❌ **"Runtime Error"** status (red)
12
+ - ❌ **"Stopped"** status (gray)
13
+
14
+ ### **Step 2: If Your Space Shows "Running", Try These URLs:**
15
+
16
+ #### **URL Format 1:**
17
+ ```
18
+ https://carsaai-carsa-api.hf.space
19
+ ```
20
+
21
+ #### **URL Format 2 (with port):**
22
+ ```
23
+ https://carsaai-carsa-api.hf.space:7860
24
+ ```
25
+
26
+ #### **URL Format 3 (alternative naming):**
27
+ ```
28
+ https://carsa-ai-carsa-api.hf.space
29
+ ```
30
+
31
+ #### **URL Format 4 (proxy path):**
32
+ ```
33
+ https://huggingface.co/spaces/CarsaAI/carsa_api/proxy
34
+ ```
35
+
36
+ #### **URL Format 5 (direct embed):**
37
+ ```
38
+ https://carsaai-carsa-api.hf.space/health
39
+ ```
40
+
41
+ ### **Step 3: Test URLs in Your Browser**
42
+
43
+ **Open each URL in your browser and look for:**
44
+
45
+ ✅ **Working URL will show:**
46
+ ```json
47
+ {
48
+ "status": "Carsa AI API is running",
49
+ "version": "1.0.0",
50
+ "services": ["translation", "speech-to-text", "text-to-speech"]
51
+ }
52
+ ```
53
+
54
+ ❌ **404 Error will show:**
55
+ ```
56
+ 404 - Sorry, we can't find the page you are looking for.
57
+ ```
58
+
59
+ ### **Step 4: If ALL URLs Give 404**
60
+
61
+ Your Space might not be properly deployed. Check these:
62
+
63
+ #### **A. Check Space Logs:**
64
+ 1. Go to: https://huggingface.co/spaces/CarsaAI/carsa_api
65
+ 2. Click **"Logs"** tab
66
+ 3. Look for errors in the build/runtime logs
67
+
68
+ #### **B. Common Issues:**
69
+ - **Build Failed**: Space never started properly
70
+ - **Runtime Error**: Space started but crashed
71
+ - **Port Issues**: App running on wrong port
72
+ - **Branch Issues**: Code not on correct branch
73
+
74
+ #### **C. Force Rebuild:**
75
+ 1. Go to your Space settings
76
+ 2. Click **"Restart this Space"**
77
+ 3. Wait for rebuild to complete
78
+
79
+ ### **Step 5: Alternative URLs to Try**
80
+
81
+ If the standard format doesn't work, your Space might use:
82
+
83
+ ```
84
+ https://hf.space/carsaai/carsa_api
85
+ https://spaces.huggingface.co/CarsaAI/carsa_api
86
+ https://carsaai-carsa-api.hf.space/gradio_api/health
87
+ ```
88
+
89
+ ### **Step 6: Quick Debug Test**
90
+
91
+ **Test this simple URL in your browser:**
92
+ ```
93
+ https://carsaai-carsa-api.hf.space/docs
94
+ ```
95
+
96
+ If this works, you'll see the **FastAPI documentation page**.
97
+
98
+ ### **Step 7: Update Your Flutter App**
99
+
100
+ Once you find the working URL, update your Flutter `api_service.dart`:
101
+
102
+ ```dart
103
+ class ApiService {
104
+ // Replace with the URL that works from Step 2-3
105
+ static const String baseUrl = 'YOUR_WORKING_URL_HERE';
106
+
107
+ // Test translation endpoint
108
+ static Future<void> testConnection() async {
109
+ try {
110
+ final response = await http.get(Uri.parse('$baseUrl/health'));
111
+ print('✅ Connection test: ${response.statusCode}');
112
+ print('✅ Response: ${response.body}');
113
+ } catch (e) {
114
+ print('❌ Connection failed: $e');
115
+ }
116
+ }
117
+ }
118
+ ```
119
+
120
+ ---
121
+
122
+ ## 🎯 **IMMEDIATE ACTIONS:**
123
+
124
+ 1. **Check your Space status** at: https://huggingface.co/spaces/CarsaAI/carsa_api
125
+ 2. **Try each URL format** in your browser
126
+ 3. **Look at Space logs** if all URLs fail
127
+ 4. **Report back** which URL works (or if none work)
128
+
129
+ **Let me know what you see when you check your Space page!** 🔍
asr_engine.py CHANGED
@@ -179,17 +179,8 @@ class ASREngine:
179
  logger.warning("Empty audio array after preprocessing")
180
  return ""
181
 
182
- # Perform transcription with optimized settings
183
- result = self.transcriber(
184
- audio_array,
185
- generate_kwargs={
186
- "task": "transcribe",
187
- "language": "english",
188
- "do_sample": False,
189
- "num_beams": 1,
190
- "temperature": 0.0
191
- }
192
- )
193
 
194
  # Extract text from result
195
  if isinstance(result, dict):
 
179
  logger.warning("Empty audio array after preprocessing")
180
  return ""
181
 
182
+ # Perform transcription with compatible settings
183
+ result = self.transcriber(audio_array)
 
 
 
 
 
 
 
 
 
184
 
185
  # Extract text from result
186
  if isinstance(result, dict):
main.py CHANGED
@@ -109,6 +109,13 @@ def translate_text(request: TranslationRequest):
109
  if not translation_engine:
110
  raise HTTPException(status_code=503, detail="Translation Engine not available.")
111
 
 
 
 
 
 
 
 
112
  try:
113
  result = translation_engine.translate(request.text, request.target_lang)
114
  return {"translated_text": result}
 
109
  if not translation_engine:
110
  raise HTTPException(status_code=503, detail="Translation Engine not available.")
111
 
112
+ # Block Hausa requests (discontinued)
113
+ if request.target_lang.lower() == "hausa":
114
+ raise HTTPException(
115
+ status_code=400,
116
+ detail="Hausa translation has been discontinued. Please use Twi, Ga, or Ewe instead."
117
+ )
118
+
119
  try:
120
  result = translation_engine.translate(request.text, request.target_lang)
121
  return {"translated_text": result}
requirements-hf.txt CHANGED
@@ -5,8 +5,8 @@ python-multipart==0.0.20
5
 
6
  # AI/ML libraries
7
  torch==2.5.1
8
- transformers==4.48.0
9
- tokenizers==0.21.0
10
  accelerate==1.2.1
11
 
12
  # Audio processing
 
5
 
6
  # AI/ML libraries
7
  torch==2.5.1
8
+ transformers==4.44.0
9
+ tokenizers==0.19.1
10
  accelerate==1.2.1
11
 
12
  # Audio processing
t_space_urls.py ADDED
@@ -0,0 +1,50 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+
2
+                    SUMMARY OF LESS COMMANDS
3
+
4
+ Commands marked with * may be preceded by a number, _N.
5
+ Notes in parentheses indicate the behavior if _N is given.
6
+ A key preceded by a caret indicates the Ctrl key; thus ^K is ctrl-K.
7
+
8
+ h H Display this help.
9
+ q :q Q :Q ZZ Exit.
10
+ ---------------------------------------------------------------------------
11
+
12
+   MOVING
13
+
14
+ e ^E j ^N CR * Forward one line (or _N lines).
15
+ y ^Y k ^K ^P * Backward one line (or _N lines).
16
+ ESC-j * Forward one file line (or _N file lines).
17
+ ESC-k * Backward one file line (or _N file lines).
18
+ f ^F ^V SPACE * Forward one window (or _N lines).
19
+ b ^B ESC-v * Backward one window (or _N lines).
20
+ z * Forward one window (and set window to _N).
21
+ w * Backward one window (and set window to _N).
22
+ ESC-SPACE * Forward one window, but don't stop at end-of-file.
23
+ ESC-b * Backward one window, but don't stop at beginning-of-file.
24
+ d ^D * Forward one half-window (and set half-window to _N).
25
+ u ^U * Backward one half-window (and set half-window to _N).
26
+ ESC-) RightArrow * Right one half screen width (or _N positions).
27
+ ESC-( LeftArrow * Left one half screen width (or _N positions).
28
+ ESC-} ^RightArrow Right to last column displayed.
29
+ ESC-{ ^LeftArrow Left to first column.
30
+ F Forward forever; like "tail -f".
31
+ ESC-F Like F but stop when search pattern is found.
32
+ r ^R ^L Repaint screen.
33
+ R Repaint screen, discarding buffered input.
34
+ ---------------------------------------------------
35
+ Default "window" is the screen height.
36
+ Default "half-window" is half of the screen height.
37
+ ---------------------------------------------------------------------------
38
+
39
+   SEARCHING
40
+
41
+ /_p_a_t_t_e_r_n * Search forward for (_N-th) matching line.
42
+ ?_p_a_t_t_e_r_n * Search backward for (_N-th) matching line.
43
+ n * Repeat previous search (for _N-th occurrence).
44
+ N * Repeat previous search in reverse direction.
45
+ ESC-n * Repeat previous search, spanning files.
46
+ ESC-N * Repeat previous search, reverse dir. & spanning files.
47
+ ^O^N ^On * Search forward for (_N-th) OSC8 hyperlink.
48
+ ^O^P ^Op * Search backward for (_N-th) OSC8 hyperlink.
49
+ ^O^L ^Ol Jump to the currently selected OSC8 hyperlink.
50
+ ESC-u Undo (toggle) search highlighting.
translation_engine.py CHANGED
@@ -8,8 +8,6 @@ Supported Languages:
8
  - Twi (Akan) - 'twi'
9
  - Ga - 'ga'
10
  - Ewe - 'ewe'
11
- - Hausa - 'hausa'
12
- - Yoruba - 'yoruba'
13
  - Igbo - 'igbo'
14
  - Swahili - 'swahili'
15
  - Amharic - 'amharic'
@@ -54,7 +52,8 @@ class TranslationEngine:
54
  "twi": "Helsinki-NLP/opus-mt-en-tw",
55
  "ga": "Helsinki-NLP/opus-mt-en-gaa",
56
  "ewe": "Helsinki-NLP/opus-mt-en-ee",
57
- "hausa": "Helsinki-NLP/opus-mt-en-ha",
 
58
  # Note: Yoruba model temporarily disabled - no valid model found
59
  # "yoruba": "Helsinki-NLP/opus-mt-en-yo", # This model doesn't exist
60
  "igbo": "Helsinki-NLP/opus-mt-en-ig",
@@ -68,7 +67,7 @@ class TranslationEngine:
68
  self.translators = {}
69
 
70
  # Load critical models (the ones your Flutter app primarily uses)
71
- self.critical_languages = ["twi", "ga", "ewe", "hausa"]
72
  self._load_critical_models()
73
 
74
  logger.info("Translation Engine initialized successfully!")
 
8
  - Twi (Akan) - 'twi'
9
  - Ga - 'ga'
10
  - Ewe - 'ewe'
 
 
11
  - Igbo - 'igbo'
12
  - Swahili - 'swahili'
13
  - Amharic - 'amharic'
 
52
  "twi": "Helsinki-NLP/opus-mt-en-tw",
53
  "ga": "Helsinki-NLP/opus-mt-en-gaa",
54
  "ewe": "Helsinki-NLP/opus-mt-en-ee",
55
+ # Hausa removed - model discontinued
56
+ # "hausa": "Helsinki-NLP/opus-mt-en-ha",
57
  # Note: Yoruba model temporarily disabled - no valid model found
58
  # "yoruba": "Helsinki-NLP/opus-mt-en-yo", # This model doesn't exist
59
  "igbo": "Helsinki-NLP/opus-mt-en-ig",
 
67
  self.translators = {}
68
 
69
  # Load critical models (the ones your Flutter app primarily uses)
70
+ self.critical_languages = ["twi", "ga", "ewe"]
71
  self._load_critical_models()
72
 
73
  logger.info("Translation Engine initialized successfully!")