"""Gemini-backed helpers for generating and editing Jupyter notebooks via SSE streaming."""
| import google.generativeai as genai | |
| from flask import Response, stream_with_context | |
| import json | |
| import time | |
def craft_notebook_prompt(user_prompt):
    """Enhance the user prompt with instructions for generating a well-structured Jupyter notebook.

    Args:
        user_prompt: Free-form description of the notebook the user wants.

    Returns:
        A prompt string instructing the model to emit a NOTEBOOK_NAME /
        NOTEBOOK_DESCRIPTION header followed by alternating
        ``--- MARKDOWN CELL ---`` / ``--- CODE CELL ---`` sections.
    """
    # NOTE(review): the cell markers below presumably must stay in sync with
    # whatever parses the model's response elsewhere in the project — confirm
    # before changing them.
    # Fixed typo: "formating" -> "formatting" (twice) in the instructions.
    enhanced_prompt = f"""
Create a complete Jupyter notebook based on this request: "{user_prompt}"
Please structure your response as follows:
NOTEBOOK_NAME: [Short, descriptive name for the notebook with no formatting "**"]
NOTEBOOK_DESCRIPTION: [a description of the notebook's purpose with no formatting]
Then provide the complete notebook with proper alternating Markdown and code cells.
Format each cell as follows:
--- MARKDOWN CELL ---
[Markdown content here]
--- CODE CELL ---
```python
[Python code here]
```
Important guidelines:
- Include comprehensive explanations in Markdown cells
- Ensure all code is executable and properly commented
- Include data loading, processing, and visualization where appropriate
- Add explanatory text before and after code sections
- Include example outputs or expected results when relevant
- Structure the notebook with clear section headers in Markdown
"""
    return enhanced_prompt
def craft_edit_prompt(edit_request, notebook_json):
    """Create a prompt for editing an existing notebook.

    Args:
        edit_request: The user's description of the desired changes.
        notebook_json: Parsed nbformat-style notebook dict; expects a
            ``cells`` list whose entries carry ``cell_type`` and ``source``.

    Returns:
        A prompt string embedding the current notebook content plus
        formatting instructions for the rewritten notebook.
    """
    # Render each cell under a numbered marker so the model can reference it.
    cells = []
    for i, cell in enumerate(notebook_json.get('cells', [])):
        cell_type = cell.get('cell_type', 'unknown')
        source = cell.get('source', [])
        if isinstance(source, list):
            # nbformat stores `source` as a list of lines that usually keep
            # their trailing '\n'; joining those with '\n' would double every
            # newline.  Normalize both conventions (lines with or without
            # trailing newlines) into single-newline-separated text.
            source = ''.join(
                line if line.endswith('\n') else line + '\n' for line in source
            ).rstrip('\n')
        if cell_type == 'markdown':
            cells.append(f"--- CELL {i+1} (MARKDOWN) ---\n{source}")
        elif cell_type == 'code':
            cells.append(f"--- CELL {i+1} (CODE) ---\n```python\n{source}\n```")
        # Cells of any other type (e.g. 'raw') are omitted from the context.
    notebook_content = '\n\n'.join(cells)
    # Create prompt with edit instructions
    enhanced_prompt = f"""
I have a Jupyter notebook that I'd like you to modify based on this edit request: "{edit_request}"
Here's the current notebook content:
NOTEBOOK_STRUCTURE:
{notebook_content}
Please provide the complete updated notebook with the requested changes, following the same format:
NOTEBOOK_NAME: [Keep or update the notebook name]
NOTEBOOK_DESCRIPTION: [Keep or update the notebook description]
Then provide the complete notebook with proper alternating Markdown and code cells.
Format each cell as follows:
--- MARKDOWN CELL ---
[Markdown content here]
--- CODE CELL ---
```python
[Python code here]
```
Important guidelines:
- Make only the changes requested in the edit request
- Preserve the overall structure of the notebook
- Keep all content from the original notebook that doesn't need modification
- Ensure all code remains executable and properly commented
- Feel free to reorganize, add, or remove cells as needed to fulfill the edit request
"""
    return enhanced_prompt
def generate_notebook(user_prompt, model_name="gemini-2.0-pro-exp-02-05"):
    """Generate a complete notebook using Gemini API.

    Builds the structured notebook prompt from the raw user request and
    returns the model's full text response in a single (non-streaming) call.
    """
    prompt = craft_notebook_prompt(user_prompt)
    return genai.GenerativeModel(model_name).generate_content(prompt).text
def edit_notebook(edit_request, notebook_json, model_name="gemini-2.0-pro-exp-02-05"):
    """Edit an existing notebook based on user request.

    Embeds the current notebook content into an edit prompt and returns the
    model's full text response in a single (non-streaming) call.
    """
    prompt = craft_edit_prompt(edit_request, notebook_json)
    return genai.GenerativeModel(model_name).generate_content(prompt).text
def _stream_gemini_sse(model_name, prompt, start_message, error_prefix):
    """Stream a Gemini response to the client as server-sent events (SSE).

    Shared implementation for :func:`stream_notebook_generation` and
    :func:`stream_notebook_edit`, which previously duplicated ~45 lines of
    identical chunk-handling logic.

    Args:
        model_name: Gemini model identifier.
        prompt: Fully-crafted prompt to send to the model.
        start_message: First ``chunk`` payload sent to announce the stream.
        error_prefix: Prefix for the ``error`` payload if generation fails.

    Returns:
        A Flask ``Response`` with content type ``text/event-stream`` whose
        events are ``data: {"chunk": ...}`` lines, terminated by
        ``data: {"done": true}`` (an ``error`` event precedes ``done`` on
        failure).
    """
    model = genai.GenerativeModel(model_name)

    def generate():
        try:
            response = model.generate_content(prompt, stream=True)
            # Announce that streaming has started.
            yield f"data: {json.dumps({'chunk': start_message})}\n\n"
            for chunk in response:
                try:
                    # Skip chunks that carry no parts at all.
                    if not hasattr(chunk, 'parts') or not chunk.parts:
                        print("Warning: Empty chunk received (no parts)")
                        continue
                    # Prefer the aggregate `text` property when it resolves.
                    try:
                        if hasattr(chunk, 'text') and chunk.text:
                            yield f"data: {json.dumps({'chunk': chunk.text})}\n\n"
                            continue
                    except (AttributeError, IndexError):
                        # `text` can raise on multi/non-text parts; fall
                        # through to per-part extraction below.
                        pass
                    # Extract text from individual parts, whatever shape the
                    # SDK hands back (attribute, dict entry, or string_value).
                    for part in chunk.parts:
                        if hasattr(part, 'text') and part.text:
                            yield f"data: {json.dumps({'chunk': part.text})}\n\n"
                        elif isinstance(part, dict) and 'text' in part:
                            yield f"data: {json.dumps({'chunk': part['text']})}\n\n"
                        elif hasattr(part, 'string_value'):
                            yield f"data: {json.dumps({'chunk': part.string_value})}\n\n"
                except (AttributeError, IndexError, TypeError) as e:
                    # Log and keep the stream alive on malformed chunks.
                    print(f"Error processing chunk: {e}, chunk structure: {repr(chunk)[:200]}")
                    continue
                # Briefly pause to prevent overwhelming the client.
                # (Skipped by the `continue` branches above, matching the
                # original behavior.)
                time.sleep(0.01)
            yield f"data: {json.dumps({'done': True})}\n\n"
        except Exception as e:
            # Report the failure to the client, then close the stream.
            error_message = f"{error_prefix}: {str(e)}"
            print(error_message)
            yield f"data: {json.dumps({'error': error_message})}\n\n"
            yield f"data: {json.dumps({'done': True})}\n\n"

    return Response(stream_with_context(generate()), content_type="text/event-stream")


def stream_notebook_generation(user_prompt, model_name="gemini-2.0-pro-exp-02-05"):
    """Stream notebook generation responses from Gemini API."""
    return _stream_gemini_sse(
        model_name,
        craft_notebook_prompt(user_prompt),
        'Starting notebook generation...',
        'Error generating notebook',
    )


def stream_notebook_edit(edit_request, notebook_json, model_name="gemini-2.0-pro-exp-02-05"):
    """Stream notebook editing responses from Gemini API."""
    return _stream_gemini_sse(
        model_name,
        craft_edit_prompt(edit_request, notebook_json),
        'Starting notebook edit...',
        'Error editing notebook',
    )