serichard1 committed on
Commit
7a52daf
·
1 Parent(s): d067e51

support all clients and upload files

Browse files
Files changed (2) hide show
  1. app.py +71 -228
  2. gradio_mcp_server.py +9 -296
app.py CHANGED
@@ -150,8 +150,8 @@ class MCPClientWrapper:
150
  } for tool in response.tools]
151
 
152
  self.connected = True
153
- tool_count = len(self.tools)
154
- return f"βœ… Connected to LEXICON Agricultural API Server. Available tools: {tool_count} tools covering weather, cadastral data, CAP parcels, municipalities, production, phytosanitary, seeds, and viticulture."
155
  except Exception as e:
156
  self.connected = False
157
  return f"❌ Failed to connect to MCP server: {str(e)}"
@@ -169,7 +169,7 @@ class MCPClientWrapper:
169
 
170
  # Check file size (limit to 10MB)
171
  if file_size > 10 * 1024 * 1024:
172
- return f"\n\nπŸ“Ž **File Upload Error**: {file_name} is too large (>10MB). Please upload a smaller file."
173
 
174
  # Try to read as text
175
  encodings_to_try = ['utf-8', 'utf-16', 'latin-1', 'cp1252']
@@ -184,7 +184,7 @@ class MCPClientWrapper:
184
  if len(content) > max_chars:
185
  content = content[:max_chars] + f"\n\n[Content truncated - showing first {max_chars} characters of {len(content)} total]"
186
 
187
- file_info = f"\n\nπŸ“Ž **Uploaded File**: {file_name}"
188
  if mime_type:
189
  file_info += f" ({mime_type})"
190
  file_info += f" - {file_size:,} bytes\n\n```\n{content}\n```"
@@ -195,10 +195,10 @@ class MCPClientWrapper:
195
  continue
196
 
197
  # If all text encodings fail, it's likely a binary file
198
- return f"\n\nπŸ“Ž **File Upload**: {file_name} appears to be a binary file and cannot be displayed as text."
199
 
200
  except Exception as e:
201
- return f"\n\nπŸ“Ž **File Upload Error**: Could not read {file_name}: {str(e)}"
202
 
203
  def _convert_tools_for_provider(self, provider: str):
204
  """Convert MCP tools format to provider-specific format."""
@@ -273,7 +273,7 @@ class MCPClientWrapper:
273
  if not self.session or not self.connected:
274
  return history + [
275
  {"role": "user", "content": message},
276
- {"role": "assistant", "content": "❌ LEXICON Agricultural API server is not connected. Please check the connection status above."}
277
  ], gr.Textbox(value=""), gr.File(value=None)
278
 
279
  # Process uploaded file if present
@@ -334,7 +334,7 @@ class MCPClientWrapper:
334
 
335
  result_messages.append({
336
  "role": "assistant",
337
- "content": f"πŸ”§ I'll use the **{tool_name}** tool to fetch the requested data.",
338
  "metadata": {
339
  "title": f"Using tool: {tool_name}",
340
  "log": f"Parameters: {json.dumps(tool_args, ensure_ascii=True)}",
@@ -349,7 +349,7 @@ class MCPClientWrapper:
349
  result_content = "\n".join(str(item) for item in result_content)
350
 
351
  # Format the response
352
- formatted_response = self._format_api_response(result_content, tool_name)
353
  result_messages.append(formatted_response)
354
 
355
  # Let the LLM analyze and respond
@@ -383,7 +383,7 @@ class MCPClientWrapper:
383
 
384
  result_messages.append({
385
  "role": "assistant",
386
- "content": f"πŸ”§ I'll use the **{tool_name}** tool to fetch the requested data."
387
  })
388
 
389
  result = await self.session.call_tool(tool_name, tool_args)
@@ -391,7 +391,7 @@ class MCPClientWrapper:
391
  if isinstance(result_content, list):
392
  result_content = "\n".join(str(item) for item in result_content)
393
 
394
- formatted_response = self._format_api_response(result_content, tool_name)
395
  result_messages.append(formatted_response)
396
 
397
  return result_messages
@@ -415,7 +415,7 @@ class MCPClientWrapper:
415
 
416
  result_messages.append({
417
  "role": "assistant",
418
- "content": f"πŸ”§ I'll use the **{tool_name}** tool to fetch the requested data."
419
  })
420
 
421
  result = await self.session.call_tool(tool_name, tool_args)
@@ -423,45 +423,47 @@ class MCPClientWrapper:
423
  if isinstance(result_content, list):
424
  result_content = "\n".join(str(item) for item in result_content)
425
 
426
- formatted_response = self._format_api_response(result_content, tool_name)
427
  result_messages.append(formatted_response)
428
 
429
  return result_messages
430
 
431
- def _format_api_response(self, result_content: str, tool_name: str):
432
- """Format API response data."""
433
  try:
434
  result_json = json.loads(result_content)
435
 
436
  if isinstance(result_json, dict):
437
  if result_json.get("type") == "success":
438
- data = result_json.get("data", {})
 
439
 
440
- # Create formatted response based on tool type
441
- if "weather" in tool_name.lower():
442
- formatted_response = self._format_weather_data(data, tool_name)
443
- elif "cadastral" in tool_name.lower():
444
- formatted_response = self._format_cadastral_data(data, tool_name)
445
- elif "municipality" in tool_name.lower() or "municipalities" in tool_name.lower():
446
- formatted_response = self._format_municipality_data(data, tool_name)
447
- elif "cap" in tool_name.lower():
448
- formatted_response = self._format_cap_data(data, tool_name)
449
- elif "production" in tool_name.lower():
450
- formatted_response = self._format_production_data(data, tool_name)
451
- elif "phytosanitary" in tool_name.lower():
452
- formatted_response = self._format_phytosanitary_data(data, tool_name)
453
- elif "seed" in tool_name.lower() or "varieties" in tool_name.lower():
454
- formatted_response = self._format_seed_data(data, tool_name)
455
- elif "vine" in tool_name.lower():
456
- formatted_response = self._format_viticulture_data(data, tool_name)
457
- else:
458
- formatted_response = self._format_generic_data(data, tool_name)
 
459
 
460
  return {
461
  "role": "assistant",
462
  "content": formatted_response,
463
  "metadata": {
464
- "title": f"Data Retrieved: {tool_name}",
465
  "status": "done",
466
  "id": f"success_result_{tool_name}"
467
  }
@@ -469,16 +471,17 @@ class MCPClientWrapper:
469
 
470
  elif result_json.get("type") == "error":
471
  error_msg = result_json.get("message", "Unknown error occurred")
 
472
 
473
- error_response = f"## ❌ Error Fetching Data\n\n"
474
- error_response += f"**Tool:** {tool_name}\n"
475
  error_response += f"**Error:** {error_msg}\n\n"
476
 
477
  return {
478
  "role": "assistant",
479
  "content": error_response,
480
  "metadata": {
481
- "title": "API Error",
482
  "status": "error",
483
  "id": f"error_result_{tool_name}"
484
  }
@@ -489,139 +492,22 @@ class MCPClientWrapper:
489
 
490
  return {
491
  "role": "assistant",
492
- "content": "```json\n" + result_content + "\n```",
493
  "metadata": {
494
- "title": "Raw API Response",
495
  "status": "done",
496
  "id": f"raw_result_{tool_name}"
497
  }
498
  }
499
-
500
- def _format_weather_data(self, data, tool_name):
501
- """Format weather-specific data."""
502
- if "reports" in data:
503
- reports = data["reports"]
504
- if isinstance(reports, list) and len(reports) > 0:
505
- formatted_response = f"## 🌀️ Weather Data\n\n"
506
- formatted_response += f"**Found {len(reports)} weather reports**\n\n"
507
- for i, report in enumerate(reports[:3]):
508
- if isinstance(report, dict):
509
- timestamp = report.get("timestamp", "Unknown time")
510
- temperature = report.get("temperature", "N/A")
511
- humidity = report.get("humidity", "N/A")
512
- formatted_response += f"**Report {i+1}** ({timestamp}):\n"
513
- formatted_response += f"- Temperature: {temperature}\n"
514
- formatted_response += f"- Humidity: {humidity}\n\n"
515
-
516
- if len(reports) > 3:
517
- formatted_response += f"... and {len(reports) - 3} more reports\n\n"
518
- return formatted_response
519
-
520
- return self._format_generic_data(data, tool_name)
521
-
522
- def _format_cadastral_data(self, data, tool_name):
523
- """Format cadastral parcel data."""
524
- formatted_response = f"## πŸ—ΊοΈ Cadastral Data\n\n"
525
-
526
- if isinstance(data, list):
527
- formatted_response += f"**Found {len(data)} cadastral parcels**\n\n"
528
- for i, parcel in enumerate(data[:5]):
529
- if isinstance(parcel, dict):
530
- parcel_id = parcel.get("id", f"Parcel {i+1}")
531
- formatted_response += f"**{parcel_id}**\n"
532
- if "area" in parcel:
533
- formatted_response += f"- Area: {parcel['area']}\n"
534
- if "owner" in parcel:
535
- formatted_response += f"- Owner: {parcel['owner']}\n"
536
- formatted_response += "\n"
537
- if len(data) > 5:
538
- formatted_response += f"... and {len(data) - 5} more parcels\n\n"
539
-
540
- return formatted_response
541
-
542
- def _format_municipality_data(self, data, tool_name):
543
- """Format municipality data."""
544
- formatted_response = f"## πŸ›οΈ Municipality Data\n\n"
545
-
546
- if isinstance(data, list):
547
- formatted_response += f"**Found {len(data)} municipalities**\n\n"
548
- for i, municipality in enumerate(data[:5]):
549
- if isinstance(municipality, dict):
550
- name = municipality.get("name", f"Municipality {i+1}")
551
- formatted_response += f"**{name}**\n"
552
- if "code" in municipality:
553
- formatted_response += f"- Code: {municipality['code']}\n"
554
- if "population" in municipality:
555
- formatted_response += f"- Population: {municipality['population']}\n"
556
- formatted_response += "\n"
557
-
558
- return formatted_response
559
-
560
- def _format_cap_data(self, data, tool_name):
561
- """Format CAP parcel data."""
562
- formatted_response = f"## 🌾 CAP Parcel Data\n\n"
563
-
564
- if isinstance(data, list):
565
- formatted_response += f"**Found {len(data)} CAP parcels**\n\n"
566
- for i, parcel in enumerate(data[:5]):
567
- if isinstance(parcel, dict):
568
- parcel_id = parcel.get("cap_id", f"CAP Parcel {i+1}")
569
- formatted_response += f"**{parcel_id}**\n"
570
- if "area" in parcel:
571
- formatted_response += f"- Area: {parcel['area']}\n"
572
- if "crop_type" in parcel:
573
- formatted_response += f"- Crop Type: {parcel['crop_type']}\n"
574
- formatted_response += "\n"
575
-
576
- return formatted_response
577
-
578
- def _format_production_data(self, data, tool_name):
579
- """Format production data."""
580
- formatted_response = f"## 🌾 Production Data\n\n"
581
- return self._format_generic_data(data, tool_name, formatted_response)
582
-
583
- def _format_phytosanitary_data(self, data, tool_name):
584
- """Format phytosanitary data."""
585
- formatted_response = f"## πŸ›‘οΈ Phytosanitary Data\n\n"
586
- return self._format_generic_data(data, tool_name, formatted_response)
587
-
588
- def _format_seed_data(self, data, tool_name):
589
- """Format seed varieties data."""
590
- formatted_response = f"## 🌱 Seed Varieties Data\n\n"
591
- return self._format_generic_data(data, tool_name, formatted_response)
592
-
593
- def _format_viticulture_data(self, data, tool_name):
594
- """Format viticulture data."""
595
- formatted_response = f"## πŸ‡ Viticulture Data\n\n"
596
- return self._format_generic_data(data, tool_name, formatted_response)
597
-
598
- def _format_generic_data(self, data, tool_name, header="## πŸ“Š Data Retrieved\n\n"):
599
- """Format generic data response."""
600
- formatted_response = header
601
-
602
- if isinstance(data, list) and len(data) > 0:
603
- formatted_response += f"**Found {len(data)} items**\n\n"
604
- # Show first few items
605
- for i, item in enumerate(data[:3]):
606
- formatted_response += f"**Item {i+1}:**\n```json\n{json.dumps(item, indent=2)}\n```\n\n"
607
- if len(data) > 3:
608
- formatted_response += f"... and {len(data) - 3} more items\n\n"
609
- elif isinstance(data, dict):
610
- formatted_response += "**Data:**\n```json\n" + json.dumps(data, indent=2) + "\n```\n\n"
611
- else:
612
- formatted_response += f"**Data:** {data}\n\n"
613
-
614
- return formatted_response
615
 
616
  client = MCPClientWrapper()
617
 
618
  def gradio_interface():
619
  with gr.Blocks(title="MCP LEXICON", theme=gr.themes.Soft()) as demo:
620
- gr.Markdown("# 🌾 LEXICON AGRICULTURAL API ASSISTANT")
621
  gr.Markdown(
622
- "Your comprehensive AI assistant for agricultural and geographical data! I can access weather stations, "
623
- "cadastral parcels, CAP parcels, municipalities, production data, phytosanitary information, seed varieties, "
624
- "and viticulture data. Choose your preferred AI model below and upload files for additional context."
625
  )
626
 
627
  # LLM Provider Selection
@@ -652,7 +538,7 @@ def gradio_interface():
652
 
653
  with gr.Column(scale=1):
654
  current_model_display = gr.Textbox(
655
- label="πŸ“‹ Current Selection",
656
  value=f"{client.current_provider}: {client.current_model}",
657
  interactive=False
658
  )
@@ -661,7 +547,7 @@ def gradio_interface():
661
  status = gr.Textbox(
662
  label="πŸ”Œ Connection Status",
663
  interactive=False,
664
- value="πŸ”„ Connecting to LEXICON Agricultural API server..."
665
  )
666
 
667
  # Main chat interface
@@ -670,17 +556,17 @@ def gradio_interface():
670
  height=600,
671
  type="messages",
672
  show_copy_button=True,
673
- avatar_images=("πŸ‘€", "🌾"),
674
  bubble_full_width=False
675
  )
676
 
677
- # File upload component
678
  file_upload = gr.File(
679
  label="πŸ“Ž Upload File (optional)",
680
  file_count="single",
681
  file_types=[
682
  ".txt", ".md", ".py", ".js", ".html", ".css", ".json", ".csv",
683
- ".xml", ".yml", ".yaml", ".ini", ".cfg", ".log", ".sql", ".geojson"
684
  ],
685
  height=100
686
  )
@@ -688,8 +574,8 @@ def gradio_interface():
688
  # Input row
689
  with gr.Row(equal_height=True):
690
  msg = gr.Textbox(
691
- label="πŸ’¬ Ask about agricultural or geographical data",
692
- placeholder="e.g., 'List all weather stations', 'Find cadastral parcels in municipality X', 'What seed varieties are available?'",
693
  scale=4
694
  )
695
  with gr.Column(scale=1):
@@ -697,56 +583,19 @@ def gradio_interface():
697
  clear_btn = gr.Button("πŸ—‘οΈ Clear Chat", size="lg")
698
  reconnect_btn = gr.Button("πŸ”„ Reconnect", size="lg")
699
 
700
- # Example queries organized by category
701
- with gr.Accordion("πŸ’‘ Example Queries by Category", open=False):
702
- with gr.Row():
703
- with gr.Column():
704
- gr.Markdown("**🌀️ Weather**")
705
- gr.Examples(
706
- examples=[
707
- "List all weather stations",
708
- "Get weather data for station ABC123",
709
- "Show station information for NYC001"
710
- ],
711
- inputs=msg,
712
- label=None
713
- )
714
-
715
- gr.Markdown("**πŸ—ΊοΈ Cadastral & Geographical**")
716
- gr.Examples(
717
- examples=[
718
- "List all municipalities",
719
- "Get cadastral parcels",
720
- "Show municipality with ID 12345",
721
- "Get geolocation for cadastral parcel XYZ"
722
- ],
723
- inputs=msg,
724
- label=None
725
- )
726
-
727
- with gr.Column():
728
- gr.Markdown("**🌾 Agricultural**")
729
- gr.Examples(
730
- examples=[
731
- "List CAP parcels",
732
- "Show available productions",
733
- "Get seed varieties",
734
- "List vine varieties for viticulture"
735
- ],
736
- inputs=msg,
737
- label=None
738
- )
739
-
740
- gr.Markdown("**πŸ›‘οΈ Phytosanitary**")
741
- gr.Examples(
742
- examples=[
743
- "List phytosanitary products",
744
- "Show phytosanitary symbols",
745
- "Get cropsets information"
746
- ],
747
- inputs=msg,
748
- label=None
749
- )
750
 
751
  # Provider/Model update functions
752
  def update_models(provider):
@@ -819,17 +668,11 @@ if __name__ == "__main__":
819
  else:
820
  print("πŸ”‘ Available API keys:", ", ".join(available_keys))
821
 
822
- print("πŸš€ Starting Multi-LLM LEXICON Agricultural API Client...")
823
- print("πŸ”— Will auto-connect to gradio_mcp_server.py")
824
- print("🌐 API endpoint: https://lexicon.osfarm.org")
825
- print("πŸ“‚ File upload enabled - supports text, code, data files, and GeoJSON")
826
  print("πŸ€– Multi-LLM support: Claude, OpenAI, Mistral, Llama")
827
- print("🌾 Comprehensive agricultural data access:")
828
- print(" β€’ Weather stations and reports")
829
- print(" β€’ Cadastral and CAP parcels")
830
- print(" β€’ Municipalities and geographical references")
831
- print(" β€’ Production and phytosanitary data")
832
- print(" β€’ Seed varieties and viticulture information")
833
 
834
  interface = gradio_interface()
835
  interface.launch(debug=True, share=True)
 
150
  } for tool in response.tools]
151
 
152
  self.connected = True
153
+ tool_names = [tool["name"] for tool in self.tools]
154
+ return f"βœ… Connected to MCP Weather Server. Available tools: {', '.join(tool_names)}"
155
  except Exception as e:
156
  self.connected = False
157
  return f"❌ Failed to connect to MCP server: {str(e)}"
 
169
 
170
  # Check file size (limit to 10MB)
171
  if file_size > 10 * 1024 * 1024:
172
+ return f"\n\nπŸ” **File Upload Error**: {file_name} is too large (>10MB). Please upload a smaller file."
173
 
174
  # Try to read as text
175
  encodings_to_try = ['utf-8', 'utf-16', 'latin-1', 'cp1252']
 
184
  if len(content) > max_chars:
185
  content = content[:max_chars] + f"\n\n[Content truncated - showing first {max_chars} characters of {len(content)} total]"
186
 
187
+ file_info = f"\n\nπŸ” **Uploaded File**: {file_name}"
188
  if mime_type:
189
  file_info += f" ({mime_type})"
190
  file_info += f" - {file_size:,} bytes\n\n```\n{content}\n```"
 
195
  continue
196
 
197
  # If all text encodings fail, it's likely a binary file
198
+ return f"\n\nπŸ” **File Upload**: {file_name} appears to be a binary file and cannot be displayed as text."
199
 
200
  except Exception as e:
201
+ return f"\n\nπŸ” **File Upload Error**: Could not read {file_name}: {str(e)}"
202
 
203
  def _convert_tools_for_provider(self, provider: str):
204
  """Convert MCP tools format to provider-specific format."""
 
273
  if not self.session or not self.connected:
274
  return history + [
275
  {"role": "user", "content": message},
276
+ {"role": "assistant", "content": "❌ MCP weather server is not connected. Please check the connection status above."}
277
  ], gr.Textbox(value=""), gr.File(value=None)
278
 
279
  # Process uploaded file if present
 
334
 
335
  result_messages.append({
336
  "role": "assistant",
337
+ "content": f"πŸ”§ I'll use the **{tool_name}** tool to fetch the weather data you requested.",
338
  "metadata": {
339
  "title": f"Using tool: {tool_name}",
340
  "log": f"Parameters: {json.dumps(tool_args, ensure_ascii=True)}",
 
349
  result_content = "\n".join(str(item) for item in result_content)
350
 
351
  # Format the response
352
+ formatted_response = self._format_weather_response(result_content, tool_name)
353
  result_messages.append(formatted_response)
354
 
355
  # Let the LLM analyze and respond
 
383
 
384
  result_messages.append({
385
  "role": "assistant",
386
+ "content": f"πŸ”§ I'll use the **{tool_name}** tool to fetch the weather data you requested."
387
  })
388
 
389
  result = await self.session.call_tool(tool_name, tool_args)
 
391
  if isinstance(result_content, list):
392
  result_content = "\n".join(str(item) for item in result_content)
393
 
394
+ formatted_response = self._format_weather_response(result_content, tool_name)
395
  result_messages.append(formatted_response)
396
 
397
  return result_messages
 
415
 
416
  result_messages.append({
417
  "role": "assistant",
418
+ "content": f"πŸ”§ I'll use the **{tool_name}** tool to fetch the weather data you requested."
419
  })
420
 
421
  result = await self.session.call_tool(tool_name, tool_args)
 
423
  if isinstance(result_content, list):
424
  result_content = "\n".join(str(item) for item in result_content)
425
 
426
+ formatted_response = self._format_weather_response(result_content, tool_name)
427
  result_messages.append(formatted_response)
428
 
429
  return result_messages
430
 
431
+ def _format_weather_response(self, result_content: str, tool_name: str):
432
+ """Format weather data response."""
433
  try:
434
  result_json = json.loads(result_content)
435
 
436
  if isinstance(result_json, dict):
437
  if result_json.get("type") == "success":
438
+ station_code = result_json.get("station_code", "Unknown")
439
+ weather_data = result_json.get("data", {})
440
 
441
+ formatted_response = f"## 🌀️ Weather Data for Station: {station_code}\n\n"
442
+
443
+ if isinstance(weather_data, dict):
444
+ if "reports" in weather_data:
445
+ reports = weather_data["reports"]
446
+ if isinstance(reports, list) and len(reports) > 0:
447
+ formatted_response += f"**Found {len(reports)} weather reports**\n\n"
448
+ for i, report in enumerate(reports[:3]):
449
+ if isinstance(report, dict):
450
+ timestamp = report.get("timestamp", "Unknown time")
451
+ temperature = report.get("temperature", "N/A")
452
+ humidity = report.get("humidity", "N/A")
453
+ formatted_response += f"**Report {i+1}** ({timestamp}):\n"
454
+ formatted_response += f"- Temperature: {temperature}\n"
455
+ formatted_response += f"- Humidity: {humidity}\n\n"
456
+
457
+ if len(reports) > 3:
458
+ formatted_response += f"... and {len(reports) - 3} more reports\n\n"
459
+
460
+ formatted_response += "**Raw Data:**\n```json\n" + json.dumps(weather_data, indent=2) + "\n```"
461
 
462
  return {
463
  "role": "assistant",
464
  "content": formatted_response,
465
  "metadata": {
466
+ "title": f"Weather Data Retrieved",
467
  "status": "done",
468
  "id": f"success_result_{tool_name}"
469
  }
 
471
 
472
  elif result_json.get("type") == "error":
473
  error_msg = result_json.get("message", "Unknown error occurred")
474
+ station_code = result_json.get("station_code", "Unknown")
475
 
476
+ error_response = f"## ❌ Error Fetching Weather Data\n\n"
477
+ error_response += f"**Station:** {station_code}\n"
478
  error_response += f"**Error:** {error_msg}\n\n"
479
 
480
  return {
481
  "role": "assistant",
482
  "content": error_response,
483
  "metadata": {
484
+ "title": "Weather API Error",
485
  "status": "error",
486
  "id": f"error_result_{tool_name}"
487
  }
 
492
 
493
  return {
494
  "role": "assistant",
495
+ "content": "```\n" + result_content + "\n```",
496
  "metadata": {
497
+ "title": "Raw Tool Response",
498
  "status": "done",
499
  "id": f"raw_result_{tool_name}"
500
  }
501
  }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
502
 
503
  client = MCPClientWrapper()
504
 
505
  def gradio_interface():
506
  with gr.Blocks(title="MCP LEXICON", theme=gr.themes.Soft()) as demo:
507
+ gr.Markdown("# 🌀️ LEXICON CHATBOT - Multi-LLM Weather Assistant")
508
  gr.Markdown(
509
+ "Ask me about weather data from any weather station! I support multiple AI providers "
510
+ "and can process uploaded files for additional context. Choose your preferred AI model below."
 
511
  )
512
 
513
  # LLM Provider Selection
 
538
 
539
  with gr.Column(scale=1):
540
  current_model_display = gr.Textbox(
541
+ label="πŸ”„ Current Selection",
542
  value=f"{client.current_provider}: {client.current_model}",
543
  interactive=False
544
  )
 
547
  status = gr.Textbox(
548
  label="πŸ”Œ Connection Status",
549
  interactive=False,
550
+ value="πŸ”„ Connecting to weather server..."
551
  )
552
 
553
  # Main chat interface
 
556
  height=600,
557
  type="messages",
558
  show_copy_button=True,
559
+ avatar_images=("πŸ‘€", "πŸ€–"),
560
  bubble_full_width=False
561
  )
562
 
563
+ # File upload component (already exists in your code!)
564
  file_upload = gr.File(
565
  label="πŸ“Ž Upload File (optional)",
566
  file_count="single",
567
  file_types=[
568
  ".txt", ".md", ".py", ".js", ".html", ".css", ".json", ".csv",
569
+ ".xml", ".yml", ".yaml", ".ini", ".cfg", ".log", ".sql"
570
  ],
571
  height=100
572
  )
 
574
  # Input row
575
  with gr.Row(equal_height=True):
576
  msg = gr.Textbox(
577
+ label="πŸ’¬ Ask about weather data",
578
+ placeholder="e.g., 'Get weather data for station NYC001' or upload a file with additional context",
579
  scale=4
580
  )
581
  with gr.Column(scale=1):
 
583
  clear_btn = gr.Button("πŸ—‘οΈ Clear Chat", size="lg")
584
  reconnect_btn = gr.Button("πŸ”„ Reconnect", size="lg")
585
 
586
+ # Example queries
587
+ with gr.Row():
588
+ gr.Examples(
589
+ examples=[
590
+ "What weather stations are available?",
591
+ "Get weather data for station ABC123",
592
+ "Show me the latest hourly reports for station NYC001",
593
+ "Analyze the uploaded data and compare it with weather patterns",
594
+ "Explain the weather trends from the uploaded CSV file"
595
+ ],
596
+ inputs=msg,
597
+ label="πŸ’‘ Example Queries"
598
+ )
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
599
 
600
  # Provider/Model update functions
601
  def update_models(provider):
 
668
  else:
669
  print("πŸ”‘ Available API keys:", ", ".join(available_keys))
670
 
671
+ print("πŸš€ Starting Multi-LLM MCP Weather Client...")
672
+ print("πŸ”‘ Will auto-connect to gradio_mcp_server.py")
673
+ print("🌐 Weather API endpoint: https://lexicon.osfarm.org/weather/stations")
674
+ print("πŸ“Ž File upload enabled - supports text, code, and data files")
675
  print("πŸ€– Multi-LLM support: Claude, OpenAI, Mistral, Llama")
 
 
 
 
 
 
676
 
677
  interface = gradio_interface()
678
  interface.launch(debug=True, share=True)
gradio_mcp_server.py CHANGED
@@ -3,297 +3,11 @@ import json
3
  import sys
4
  import io
5
  import requests
6
- from typing import Optional
7
 
8
  sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf-8', errors='replace')
9
  sys.stderr = io.TextIOWrapper(sys.stderr.buffer, encoding='utf-8', errors='replace')
10
 
11
- mcp = FastMCP("lexicon_api_server")
12
-
13
- # Base URL for all API endpoints
14
- BASE_URL = "https://lexicon.osfarm.org"
15
-
16
- def make_api_request(url: str, params: dict = None, endpoint_name: str = "API") -> str:
17
- """Generic function to make API requests with consistent error handling."""
18
- try:
19
- response = requests.get(url, params=params, timeout=30)
20
- response.raise_for_status()
21
-
22
- data = response.json()
23
-
24
- return json.dumps({
25
- "type": "success",
26
- "data": data,
27
- "url": url,
28
- "params": params or {},
29
- "message": f"Successfully retrieved data from {endpoint_name}"
30
- }, indent=2)
31
-
32
- except requests.exceptions.ConnectionError:
33
- return json.dumps({
34
- "type": "error",
35
- "url": url,
36
- "message": f"Could not connect to {endpoint_name}. Please ensure the service is running."
37
- })
38
- except requests.exceptions.Timeout:
39
- return json.dumps({
40
- "type": "error",
41
- "url": url,
42
- "message": f"Request timed out while fetching data from {endpoint_name}"
43
- })
44
- except requests.exceptions.HTTPError as e:
45
- status_code = e.response.status_code if e.response else "unknown"
46
- return json.dumps({
47
- "type": "error",
48
- "url": url,
49
- "status_code": status_code,
50
- "message": f"HTTP error {status_code} when fetching data from {endpoint_name}. Resource may not exist or API may be unavailable."
51
- })
52
- except json.JSONDecodeError:
53
- return json.dumps({
54
- "type": "error",
55
- "url": url,
56
- "message": f"Invalid JSON response received from {endpoint_name}"
57
- })
58
- except Exception as e:
59
- return json.dumps({
60
- "type": "error",
61
- "url": url,
62
- "message": f"Unexpected error fetching data from {endpoint_name}: {str(e)}"
63
- })
64
-
65
- # TOOLS - PARCEL IDENTIFIER
66
- @mcp.tool()
67
- async def get_parcel_identifier() -> str:
68
- """Get parcel identifier tools/information in JSON format.
69
-
70
- Returns:
71
- JSON string containing parcel identifier data or error information
72
- """
73
- url = f"{BASE_URL}/tools/parcel-identifier.json"
74
- return make_api_request(url, endpoint_name="Parcel Identifier")
75
-
76
- @mcp.tool()
77
- async def get_parcel_identifier_geojson() -> str:
78
- """Get parcel identifier tools/information in GeoJSON format.
79
-
80
- Returns:
81
- JSON string containing parcel identifier GeoJSON data or error information
82
- """
83
- url = f"{BASE_URL}/tools/parcel-identifier.geojson"
84
- return make_api_request(url, endpoint_name="Parcel Identifier GeoJSON")
85
-
86
- # GEOGRAPHICAL REFERENCES - CADASTRAL PARCELS
87
- @mcp.tool()
88
- async def get_cadastral_parcels() -> str:
89
- """Get list of all cadastral parcels.
90
-
91
- Returns:
92
- JSON string containing cadastral parcels data or error information
93
- """
94
- url = f"{BASE_URL}/geographical-references/cadastral-parcels.json"
95
- return make_api_request(url, endpoint_name="Cadastral Parcels")
96
-
97
- @mcp.tool()
98
- async def get_cadastral_parcel_by_id(parcel_id: str) -> str:
99
- """Get specific cadastral parcel information by ID.
100
-
101
- Args:
102
- parcel_id: The cadastral parcel ID to fetch information for
103
-
104
- Returns:
105
- JSON string containing cadastral parcel data or error information
106
- """
107
- url = f"{BASE_URL}/geographical-references/cadastral-parcels/{parcel_id}.json"
108
- return make_api_request(url, endpoint_name=f"Cadastral Parcel {parcel_id}")
109
-
110
- @mcp.tool()
111
- async def get_cadastral_parcel_geolocation(parcel_id: str) -> str:
112
- """Get geolocation data for a specific cadastral parcel in GeoJSON format.
113
-
114
- Args:
115
- parcel_id: The cadastral parcel ID to fetch geolocation data for
116
-
117
- Returns:
118
- JSON string containing cadastral parcel geolocation data or error information
119
- """
120
- url = f"{BASE_URL}/geographical-references/cadastral-parcels/{parcel_id}/geolocation.geojson"
121
- return make_api_request(url, endpoint_name=f"Cadastral Parcel {parcel_id} Geolocation")
122
-
123
- # GEOGRAPHICAL REFERENCES - CAP PARCELS
124
- @mcp.tool()
125
- async def get_cap_parcels() -> str:
126
- """Get list of all CAP (Common Agricultural Policy) parcels.
127
-
128
- Returns:
129
- JSON string containing CAP parcels data or error information
130
- """
131
- url = f"{BASE_URL}/geographical-references/cap-parcels.json"
132
- return make_api_request(url, endpoint_name="CAP Parcels")
133
-
134
- @mcp.tool()
135
- async def get_cap_parcel_by_id(cap_id: str) -> str:
136
- """Get specific CAP parcel information by ID.
137
-
138
- Args:
139
- cap_id: The CAP parcel ID to fetch information for
140
-
141
- Returns:
142
- JSON string containing CAP parcel data or error information
143
- """
144
- url = f"{BASE_URL}/geographical-references/cap-parcels/{cap_id}.json"
145
- return make_api_request(url, endpoint_name=f"CAP Parcel {cap_id}")
146
-
147
- @mcp.tool()
148
- async def get_cap_parcel_geolocation(cap_id: str) -> str:
149
- """Get geolocation data for a specific CAP parcel in GeoJSON format.
150
-
151
- Args:
152
- cap_id: The CAP parcel ID to fetch geolocation data for
153
-
154
- Returns:
155
- JSON string containing CAP parcel geolocation data or error information
156
- """
157
- url = f"{BASE_URL}/geographical-references/cap-parcels/{cap_id}/geolocation.geojson"
158
- return make_api_request(url, endpoint_name=f"CAP Parcel {cap_id} Geolocation")
159
-
160
- # GEOGRAPHICAL REFERENCES - MUNICIPALITIES
161
- @mcp.tool()
162
- async def get_municipalities() -> str:
163
- """Get list of all municipalities.
164
-
165
- Returns:
166
- JSON string containing municipalities data or error information
167
- """
168
- url = f"{BASE_URL}/geographical-references/municipalities.json"
169
- return make_api_request(url, endpoint_name="Municipalities")
170
-
171
- @mcp.tool()
172
- async def get_municipality_by_id(municipality_id: str) -> str:
173
- """Get specific municipality information by ID.
174
-
175
- Args:
176
- municipality_id: The municipality ID to fetch information for
177
-
178
- Returns:
179
- JSON string containing municipality data or error information
180
- """
181
- url = f"{BASE_URL}/geographical-references/municipalities/{municipality_id}.json"
182
- return make_api_request(url, endpoint_name=f"Municipality {municipality_id}")
183
-
184
- @mcp.tool()
185
- async def get_municipality_cadastre(municipality_id: str) -> str:
186
- """Get cadastre data for a specific municipality in GeoJSON format.
187
-
188
- Args:
189
- municipality_id: The municipality ID to fetch cadastre data for
190
-
191
- Returns:
192
- JSON string containing municipality cadastre data or error information
193
- """
194
- url = f"{BASE_URL}/geographical-references/municipalities/{municipality_id}/cadastre.geojson"
195
- return make_api_request(url, endpoint_name=f"Municipality {municipality_id} Cadastre")
196
-
197
- @mcp.tool()
198
- async def get_municipality_cap_parcels(municipality_id: str) -> str:
199
- """Get CAP parcels data for a specific municipality in GeoJSON format.
200
-
201
- Args:
202
- municipality_id: The municipality ID to fetch CAP parcels data for
203
-
204
- Returns:
205
- JSON string containing municipality CAP parcels data or error information
206
- """
207
- url = f"{BASE_URL}/geographical-references/municipalities/{municipality_id}/cap-parcels.geojson"
208
- return make_api_request(url, endpoint_name=f"Municipality {municipality_id} CAP Parcels")
209
-
210
- # PRODUCTION
211
- @mcp.tool()
212
- async def get_productions() -> str:
213
- """Get list of all agricultural productions.
214
-
215
- Returns:
216
- JSON string containing productions data or error information
217
- """
218
- url = f"{BASE_URL}/production/productions.json"
219
- return make_api_request(url, endpoint_name="Productions")
220
-
221
- # PHYTOSANITARY
222
- @mcp.tool()
223
- async def get_phytosanitary_cropsets() -> str:
224
- """Get list of phytosanitary cropsets.
225
-
226
- Returns:
227
- JSON string containing cropsets data or error information
228
- """
229
- url = f"{BASE_URL}/phytosanitary/cropsets.json"
230
- return make_api_request(url, endpoint_name="Phytosanitary Cropsets")
231
-
232
- @mcp.tool()
233
- async def get_phytosanitary_products() -> str:
234
- """Get list of phytosanitary products.
235
-
236
- Returns:
237
- JSON string containing phytosanitary products data or error information
238
- """
239
- url = f"{BASE_URL}/phytosanitary/products.json"
240
- return make_api_request(url, endpoint_name="Phytosanitary Products")
241
-
242
- @mcp.tool()
243
- async def get_phytosanitary_symbols() -> str:
244
- """Get list of phytosanitary symbols.
245
-
246
- Returns:
247
- JSON string containing phytosanitary symbols data or error information
248
- """
249
- url = f"{BASE_URL}/phytosanitary/symbols.json"
250
- return make_api_request(url, endpoint_name="Phytosanitary Symbols")
251
-
252
- # SEEDS
253
- @mcp.tool()
254
- async def get_seed_varieties() -> str:
255
- """Get list of seed varieties.
256
-
257
- Returns:
258
- JSON string containing seed varieties data or error information
259
- """
260
- url = f"{BASE_URL}/seeds/varieties.json"
261
- return make_api_request(url, endpoint_name="Seed Varieties")
262
-
263
- # VITICULTURE
264
- @mcp.tool()
265
- async def get_vine_varieties() -> str:
266
- """Get list of vine varieties.
267
-
268
- Returns:
269
- JSON string containing vine varieties data or error information
270
- """
271
- url = f"{BASE_URL}/viticulture/vine-varieties.json"
272
- return make_api_request(url, endpoint_name="Vine Varieties")
273
-
274
- # WEATHER - Enhanced existing tools
275
- @mcp.tool()
276
- async def list_weather_stations() -> str:
277
- """Get a list of available weather stations.
278
-
279
- Returns:
280
- JSON string containing available stations or error information
281
- """
282
- url = f"{BASE_URL}/weather/stations.json"
283
- return make_api_request(url, endpoint_name="Weather Stations")
284
-
285
- @mcp.tool()
286
- async def get_weather_station_info(station_code: str) -> str:
287
- """Get information about a specific weather station.
288
-
289
- Args:
290
- station_code: The weather station code/ID to fetch information for
291
-
292
- Returns:
293
- JSON string containing weather station information or error information
294
- """
295
- url = f"{BASE_URL}/weather/stations/{station_code}.json"
296
- return make_api_request(url, endpoint_name=f"Weather Station {station_code}")
297
 
298
  @mcp.tool()
299
  async def get_weather_data(station_code: str, page: int = 1, start: str = None, end: str = None) -> str:
@@ -308,15 +22,14 @@ async def get_weather_data(station_code: str, page: int = 1, start: str = None,
308
  Returns:
309
  JSON string containing weather data or error information
310
  """
311
- url = f"{BASE_URL}/weather/stations/{station_code}/hourly-reports.json"
 
312
 
313
- params = {}
314
- if page != 1:
315
- params["page"] = page
316
- if start:
317
- params["start"] = start
318
- if end:
319
- params["end"] = end
320
 
321
  try:
322
  response = requests.get(url, params=params, timeout=30)
@@ -336,7 +49,7 @@ async def get_weather_data(station_code: str, page: int = 1, start: str = None,
336
  return json.dumps({
337
  "type": "error",
338
  "station_code": station_code,
339
- "message": f"Could not connect to weather API. Please ensure the weather service is running."
340
  })
341
  except requests.exceptions.Timeout:
342
  return json.dumps({
 
3
  import sys
4
  import io
5
  import requests
 
6
 
7
  sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf-8', errors='replace')
8
  sys.stderr = io.TextIOWrapper(sys.stderr.buffer, encoding='utf-8', errors='replace')
9
 
10
+ mcp = FastMCP("weather_api_server")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
11
 
12
  @mcp.tool()
13
  async def get_weather_data(station_code: str, page: int = 1, start: str = None, end: str = None) -> str:
 
22
  Returns:
23
  JSON string containing weather data or error information
24
  """
25
+ base_url = "https://lexicon.osfarm.org/weather/stations"
26
+ url = f"{base_url}/{station_code}/hourly-reports.json"
27
 
28
+ params = {
29
+ "page": page,
30
+ "start": start if start else "null",
31
+ "end": end if end else "null"
32
+ }
 
 
33
 
34
  try:
35
  response = requests.get(url, params=params, timeout=30)
 
49
  return json.dumps({
50
  "type": "error",
51
  "station_code": station_code,
52
+ "message": f"Could not connect to weather API at localhost:8888. Please ensure the weather service is running."
53
  })
54
  except requests.exceptions.Timeout:
55
  return json.dumps({