"""
GeoAI Coding Agent - Main Application
======================================
A Geospatial AI Coding Assistant powered by Qwen2.5-Coder-7B-Instruct.
Specialized in GDAL, Rasterio, GeoPandas, and geospatial development.
"""
# Standard library
import json
import os
import re
import tempfile
from datetime import datetime
from typing import Generator, Optional, Tuple

# Third-party
import gradio as gr
from huggingface_hub import InferenceClient

# Local configuration
from config import (
    MODEL_ID,
    MAX_NEW_TOKENS,
    TEMPERATURE,
    TOP_P,
    REPETITION_PENALTY,
    SUPPORTED_LANGUAGES,
    SYSTEM_PROMPT,
    EXAMPLE_PROMPTS,
    MARKDOWN_TEMPLATE,
)

# Hugging Face access token, injected via HF Spaces secret; empty string when unset.
HF_TOKEN = os.environ.get("HF_TOKEN", "")
def create_client() -> Optional[InferenceClient]:
    """Build an InferenceClient for MODEL_ID.

    Returns:
        A ready client, or None when no token is configured or the
        client could not be constructed.
    """
    if not HF_TOKEN:
        # No secret configured for this Space — caller handles the None.
        return None
    try:
        client = InferenceClient(model=MODEL_ID, token=HF_TOKEN)
    except Exception as e:
        print(f"Error creating client: {e}")
        return None
    return client
def extract_code_blocks(text: str) -> list[dict]:
    """Extract fenced code blocks (with language info) from a model response.

    Parameters:
        text: Markdown text possibly containing ```lang ...``` fences.

    Returns:
        A list of dicts with keys "language", "code" (stripped), and
        "extension". Fences with an unknown or missing language tag fall
        back to Python / ".py".
    """
    pattern = r"```(\w+)?\n(.*?)```"
    blocks = []
    for lang, code in re.findall(pattern, text, re.DOTALL):
        lang = lang.lower() if lang else "python"
        if lang in SUPPORTED_LANGUAGES:
            extension = SUPPORTED_LANGUAGES[lang]["extension"]
        else:
            # Unknown fence language: default to Python.
            lang, extension = "python", ".py"
        blocks.append({
            "language": lang,
            "code": code.strip(),
            "extension": extension,
        })
    return blocks
def generate_response(
    message: str,
    history: list,
) -> Generator[str, None, None]:
    """Stream a model reply, yielding the cumulative text after each token.

    Parameters:
        message: The current user query.
        history: Prior conversation turns as role/content dicts.

    Yields:
        The response accumulated so far, or a user-facing error string.
    """
    if not HF_TOKEN:
        yield "⚠️ **Error**: Server configuration error. Please contact the administrator."
        return
    client = create_client()
    if client is None:
        yield "⚠️ **Error**: Failed to initialize the model client. Please try again later."
        return

    # Assemble the chat payload: system prompt, prior turns, current query.
    # (Gradio 6.x history entries are dicts with role/content keys.)
    messages = [{"role": "system", "content": SYSTEM_PROMPT}]
    messages.extend(
        {"role": turn["role"], "content": turn["content"]}
        for turn in history
        if isinstance(turn, dict)
    )
    messages.append({"role": "user", "content": message})

    try:
        accumulated = ""
        stream = client.chat_completion(
            messages=messages,
            max_tokens=MAX_NEW_TOKENS,
            temperature=TEMPERATURE,
            top_p=TOP_P,
            stream=True,
        )
        for chunk in stream:
            delta = chunk.choices[0].delta.content if chunk.choices else None
            if delta:
                accumulated += delta
                yield accumulated
    except Exception as e:
        error_msg = str(e)
        # Map common HTTP failures to friendlier messages.
        if "401" in error_msg or "unauthorized" in error_msg.lower():
            yield "⚠️ **Authentication Error**: Invalid HuggingFace token. Please check your API token."
        elif "429" in error_msg or "rate" in error_msg.lower():
            yield "⚠️ **Rate Limit**: Too many requests. Please wait a moment and try again."
        else:
            yield f"⚠️ **Error**: {error_msg}"
def create_download_file(
    response: str,
    query: str,
    file_format: str
) -> Optional[str]:
    """Write the response to a temp file and return its path.

    Parameters:
        response: The assistant's full reply text.
        query: The user question (embedded into the markdown export).
        file_format: "markdown" for the full transcript, "code" for the
            first fenced code block only.

    Returns:
        The written file path, or None for empty responses, unknown
        formats, responses without code blocks, or I/O failures.
    """
    if not response:
        return None
    stamp = datetime.now().strftime("%Y%m%d_%H%M%S")
    try:
        if file_format == "markdown":
            body = MARKDOWN_TEMPLATE.format(query=query, response=response)
            name = f"geoai_response_{stamp}.md"
        elif file_format == "code":
            blocks = extract_code_blocks(response)
            if not blocks:
                return None
            first = blocks[0]  # only the first fenced block is exported
            body = first["code"]
            name = f"geoai_code_{stamp}{first['extension']}"
        else:
            return None
        target = os.path.join(tempfile.gettempdir(), name)
        with open(target, "w", encoding="utf-8") as fh:
            fh.write(body)
        return target
    except Exception as e:
        print(f"Error creating download file: {e}")
        return None
def download_as_markdown(response: str, query: str) -> Optional[str]:
    """Export the response as a Markdown file; returns the path or None."""
    return create_download_file(response, query, file_format="markdown")
def download_as_code(response: str, query: str) -> Optional[str]:
    """Export the response's first code block as a source file; path or None."""
    return create_download_file(response, query, file_format="code")
# Custom CSS injected into the Gradio page: full-width layout, dark code
# blocks with monospace fonts, and styling hooks for the example/download
# button classes referenced below in create_app().
CUSTOM_CSS = """
/* Full width container */
.gradio-container {
    max-width: 100% !important;
    width: 100% !important;
    margin: 0 auto !important;
    padding: 20px !important;
}
/* Code block styling */
.prose pre {
    background-color: #1e1e1e !important;
    border-radius: 8px;
    padding: 16px;
    overflow-x: auto;
}
.prose code {
    font-family: 'JetBrains Mono', 'Fira Code', 'Consolas', monospace !important;
    font-size: 14px;
}
/* Chat message styling */
.message {
    font-family: 'Inter', sans-serif;
}
/* Header styling */
.header-text {
    text-align: center;
    margin-bottom: 20px;
}
/* Example buttons */
.example-btn {
    font-size: 12px !important;
}
/* Download buttons container */
.download-container {
    display: flex;
    gap: 10px;
    margin-top: 10px;
}
"""
# Build the Gradio Interface
def create_app():
    """Create and configure the Gradio application.

    Returns:
        The assembled gr.Blocks app (not yet launched).
    """
    with gr.Blocks(
        title="GeoAI Coding Agent",
        # theme/css belong on the Blocks constructor — gr.Blocks.launch()
        # does not accept them.
        theme=gr.themes.Soft(
            primary_hue="blue",
            secondary_hue="slate",
            neutral_hue="slate",
        ),
        css=CUSTOM_CSS,
    ) as app:
        # Header
        gr.Markdown(
            """
            # 🌍 GeoAI Coding Agent
            ### Geospatial AI Coding Assistant powered by Qwen2.5-Coder-7B
            Expert in **GDAL/OGR**, **Rasterio**, **GeoPandas**, **xarray**, and geospatial development.
            Fluent in Python, Java, C/C++, JavaScript, TypeScript, and Rust.
            """
        )

        # Main Chat Interface. type="messages" because every handler below
        # builds/consumes role/content dicts, not (user, bot) tuples.
        chatbot = gr.Chatbot(
            label="GeoAI Conversation",
            height=500,
            type="messages",
        )

        # Input Row
        with gr.Row():
            msg_input = gr.Textbox(
                label="Your Query",
                placeholder="Ask about geospatial coding... (e.g., 'Read GeoTIFF with rasterio and reproject to UTM')",
                lines=3,
                scale=4,
            )
            submit_btn = gr.Button("🚀 Generate", variant="primary", scale=1)

        # Example Prompts: two rows of four buttons, labels truncated to 50 chars.
        gr.Markdown("### 💡 Example Prompts")
        example_btns = []
        with gr.Row():
            for example in EXAMPLE_PROMPTS[:4]:
                btn = gr.Button(
                    example[:50] + "..." if len(example) > 50 else example,
                    size="sm",
                    elem_classes=["example-btn"],
                )
                example_btns.append((btn, example))
        with gr.Row():
            for example in EXAMPLE_PROMPTS[4:8]:
                btn = gr.Button(
                    example[:50] + "..." if len(example) > 50 else example,
                    size="sm",
                    elem_classes=["example-btn"],
                )
                example_btns.append((btn, example))

        # Download Section
        gr.Markdown("### 📥 Download Response")
        with gr.Row():
            download_md_btn = gr.Button("📄 Markdown", size="sm")
            download_code_btn = gr.Button("💻 Code File", size="sm")
        download_file = gr.File(label="Download", visible=False)

        # State for tracking the last completed exchange (used by downloads).
        last_response = gr.State("")
        last_query = gr.State("")

        # Event Handlers
        def user_message(message, history):
            """Append the user's message to history and clear the input box."""
            if not message.strip():
                return "", history
            return "", history + [{"role": "user", "content": message}]

        def bot_response(history):
            """Stream the assistant reply, updating chat + download state."""
            if not history:
                return history, "", ""
            user_msg = history[-1]["content"]
            # Re-yield the growing history so the UI streams token-by-token.
            for response in generate_response(user_msg, history[:-1]):
                yield history + [{"role": "assistant", "content": response}], response, user_msg

        def make_set_example(example_text):
            """Bind example_text now to avoid the late-binding closure pitfall."""
            def set_example():
                return example_text
            return set_example

        def handle_download_md(response, query):
            """Expose the markdown export in the File component, if created."""
            filepath = download_as_markdown(response, query)
            if filepath:
                return gr.File(value=filepath, visible=True)
            return gr.File(visible=False)

        def handle_download_code(response, query):
            """Expose the code export in the File component, if created."""
            filepath = download_as_code(response, query)
            if filepath:
                return gr.File(value=filepath, visible=True)
            return gr.File(visible=False)

        # Wire up events: button click and textbox submit share the same chain.
        submit_btn.click(
            user_message,
            [msg_input, chatbot],
            [msg_input, chatbot],
            queue=False,
        ).then(
            bot_response,
            [chatbot],
            [chatbot, last_response, last_query],
        )
        msg_input.submit(
            user_message,
            [msg_input, chatbot],
            [msg_input, chatbot],
            queue=False,
        ).then(
            bot_response,
            [chatbot],
            [chatbot, last_response, last_query],
        )

        # Example button clicks fill the input box with the full prompt.
        for btn, example in example_btns:
            btn.click(make_set_example(example), inputs=[], outputs=[msg_input])

        # Download button clicks
        download_md_btn.click(
            handle_download_md,
            [last_response, last_query],
            [download_file],
        )
        download_code_btn.click(
            handle_download_code,
            [last_response, last_query],
            [download_file],
        )

        # Footer
        gr.Markdown(
            """
            ---
            *GeoAI Coding Agent - Geospatial AI Coding Assistant*
            **Built by:** [rifatSDAS](https://github.com/rifatSDAS)
            """
        )
    return app
# Main entry point
if __name__ == "__main__":
    app = create_app()
    app.queue()
    # NOTE: gr.Blocks.launch() accepts no `theme` or `css` parameters — those
    # are gr.Blocks() constructor arguments, and passing them here raises a
    # TypeError. They have been removed from the launch() call.
    app.launch(
        server_name="0.0.0.0",  # bind all interfaces (required on HF Spaces)
        server_port=7860,       # default HF Spaces port
        share=False,
        show_error=True,
    )