import gradio as gr
import os  # NOTE(review): unused in the visible code — kept in case another chunk of this file needs it


def main():
    """Build the Gradio landing page for the CELESTIAL training notebooks.

    Assembles a static informational UI (a Markdown overview panel plus an
    HTML call-to-action row) describing how to download and run the
    Mistral-7B training notebooks in Google Colab.

    Returns:
        gr.Blocks: the assembled, not-yet-launched Gradio app.
    """
    with gr.Blocks(title="CELESTIAL AI Training Notebooks", theme=gr.themes.Soft()) as demo:
        # NOTE(review): the "π" glyphs below appear to be mojibake for emoji
        # in the original source; preserved as-is since the originals cannot
        # be recovered from this copy.
        gr.Markdown("""
# π CELESTIAL AI Training Notebooks v2.0
## Complete Spiritual AI Training with Mistral-7B

This Space contains comprehensive Jupyter notebooks for training CELESTIAL's spiritual AI:

### π **Available Notebooks:**
1. **π€ Main Training Notebook** - `CELESTIAL_Mistral_7B_Training.ipynb`
   - Complete Mistral-7B fine-tuning with LoRA
   - 9,000+ training examples across 50+ spiritual features
   - Production-ready training pipeline
2. **π― Domain Adapters Notebook** - `CELESTIAL_Domain_Adapters_Training.ipynb`
   - Hot-swappable LoRA adapters for specialized expertise
   - 5 domain-specific adapters (Astrology, Krishna, Vastu, Panchang, Remedies)
   - Adapter testing and deployment

### π **Quick Start:**
1. **Open in Google Colab**: Click the Colab links below
2. **Login to Hugging Face**: Authenticate in the notebook
3. **Run All Cells**: Complete training pipeline
4. **Download Models**: Get your trained CELESTIAL AI

### π₯ **Download Notebooks:**
- [π Main Training Notebook](https://huggingface.co/spaces/dp1812/celestial-training-notebooks-v2/blob/main/CELESTIAL_Mistral_7B_Training.ipynb) - Download and open in Colab
- [π Domain Adapters Notebook](https://huggingface.co/spaces/dp1812/celestial-training-notebooks-v2/blob/main/CELESTIAL_Domain_Adapters_Training.ipynb) - Download and open in Colab

### π **How to Use:**
1. **Download** the notebook files from the links above
2. **Upload** to Google Colab or open locally in Jupyter
3. **Run** all cells to complete training
4. **Download** your trained CELESTIAL AI model

### π **Training Features:**
- **Base Model**: Mistral-7B-Instruct-v0.3
- **Training Data**: 9,000+ examples from comprehensive spiritual dataset
- **Method**: LoRA (Low-Rank Adaptation) for efficient fine-tuning
- **Domains**: Astrology, Vastu, Numerology, Divine Guidance, Remedies
- **Output**: Production-ready spiritual AI with hot-swappable adapters

### π― **Expected Results:**
- **Trained Model**: Fine-tuned Mistral-7B for spiritual guidance
- **5 Domain Adapters**: Specialized expertise for different spiritual domains
- **Evaluation Metrics**: Comprehensive testing across all features
- **Deployment Ready**: Models uploaded to Hugging Face Hub

### π **Performance Targets:**
- **Groundedness**: >95% factual accuracy using tool results
- **Usefulness**: >90% actionable spiritual guidance
- **Latency**: <300ms first token generation
- **Coverage**: 100% of CELESTIAL's 50+ features

---
*Built with β€οΈ for spiritual AI and ancient wisdom.*
""")

        with gr.Row():
            gr.HTML("""
<div style="text-align: center; padding: 20px;">
    <h3>π Ready to Train Your Spiritual AI?</h3>
    <p><strong>Step 1:</strong> Download the notebook files above</p>
    <p><strong>Step 2:</strong> Upload to Google Colab</p>
    <p><strong>Step 3:</strong> Run all cells to train your AI!</p>
    <br>
    <p><em>Training takes 2-4 hours and produces a world-class spiritual AI</em></p>
</div>
""")

    return demo
if __name__ == "__main__":
    # Build the UI and start the Gradio server (blocks until shut down).
    demo = main()
    demo.launch()