# Hugging Face Spaces page artifact (status: Sleeping) — not part of the application code.
from flask import Flask, render_template, request, jsonify, send_file
import os
import json
import uuid
import shutil
from datetime import datetime

app = Flask(__name__)
app.config['MAX_CONTENT_LENGTH'] = 5 * 1024 * 1024  # 5MB Limit on request bodies

# Configuration: all SOP records persist in a single JSON file under ./data.
DATA_DIR = os.path.join(os.path.dirname(__file__), 'data')
SOP_FILE = os.path.join(DATA_DIR, 'sops.json')
DEFAULT_SOP_FILE = os.path.join(os.path.dirname(__file__), 'sops_default.json')

os.makedirs(DATA_DIR, exist_ok=True)

# Initialize data file if not exists: seed from the bundled defaults when
# available, otherwise (or if the copy fails) start from an empty list.
if not os.path.exists(SOP_FILE):
    if os.path.exists(DEFAULT_SOP_FILE):
        try:
            shutil.copy(DEFAULT_SOP_FILE, SOP_FILE)
            print(f"Initialized {SOP_FILE} from default data.")
        except Exception as e:
            print(f"Error copying default data: {e}")
            # Fallback to empty list
            with open(SOP_FILE, 'w', encoding='utf-8') as f:
                json.dump([], f)
    else:
        with open(SOP_FILE, 'w', encoding='utf-8') as f:
            json.dump([], f)
# Error Handlers
@app.errorhandler(404)
def not_found_error(error):
    """404 handler: JSON for API paths, otherwise the SPA shell.

    Registered via @app.errorhandler — without it Flask never invokes
    this function.
    """
    if request.path.startswith('/api/'):
        return jsonify({'error': '资源未找到'}), 404
    # Non-API 404s render the single-page app so client-side routing can
    # take over (status stays 404).
    return render_template('index.html'), 404
@app.errorhandler(500)
def internal_error(error):
    """500 handler: JSON for API paths, plain text otherwise.

    Registered via @app.errorhandler — without it Flask never invokes
    this function.
    """
    if request.path.startswith('/api/'):
        return jsonify({'error': '服务器内部错误'}), 500
    return "服务器内部错误", 500
@app.errorhandler(413)
def request_entity_too_large(error):
    """413 handler raised by Flask when a body exceeds MAX_CONTENT_LENGTH (5MB).

    Registered via @app.errorhandler so the limit produces a JSON error
    instead of Flask's default HTML page.
    """
    return jsonify({'error': '文件过大 (最大 5MB)'}), 413
class DataStore:
    """Thin persistence layer over the single JSON file at SOP_FILE.

    Both methods are used as class-level calls (DataStore.load_sops()),
    so they are declared @staticmethod — without the decorator they would
    break if ever called on an instance.
    """

    @staticmethod
    def load_sops():
        """Return the list of SOP dicts; [] if the file is missing or unreadable."""
        try:
            with open(SOP_FILE, 'r', encoding='utf-8') as f:
                return json.load(f)
        except Exception as e:
            print(f"Error loading SOPs: {e}")
            return []

    @staticmethod
    def save_sops(sops):
        """Overwrite SOP_FILE with `sops`; return True on success, False on error."""
        try:
            with open(SOP_FILE, 'w', encoding='utf-8') as f:
                json.dump(sops, f, ensure_ascii=False, indent=2)
            return True
        except Exception as e:
            print(f"Error saving SOPs: {e}")
            return False
@app.route('/')
def index():
    """Serve the single-page application shell.

    NOTE(review): route path restored as '/' — the decorator was lost in
    extraction; confirm against the frontend.
    """
    return render_template('index.html')
@app.route('/api/sops', methods=['GET'])
def get_sops():
    """Return all SOPs as JSON, most recently updated first.

    NOTE(review): route path inferred from the '/api/' prefix used by the
    error handlers — confirm against the frontend.
    """
    sops = DataStore.load_sops()
    # Sort by updated_at desc; records without the field sort last.
    sops.sort(key=lambda x: x.get('updated_at', ''), reverse=True)
    return jsonify(sops)
@app.route('/api/sops', methods=['POST'])
def create_sop():
    """Create a new draft SOP from the JSON request body and persist it.

    Returns the full new record as JSON.
    NOTE(review): route path inferred — confirm against the frontend.
    """
    # Guard: request.json is None when no JSON body was sent.
    data = request.json or {}
    sops = DataStore.load_sops()
    # Single timestamp so created_at and updated_at match exactly on creation.
    now = datetime.now().isoformat()
    new_sop = {
        'id': str(uuid.uuid4()),
        'title': data.get('title', '未命名 SOP'),
        'category': data.get('category', '通用'),
        'version': '1.0',
        'status': 'draft',  # draft, review, published
        'created_at': now,
        'updated_at': now,
        'content': {
            'purpose': '',
            'scope': '',
            'prerequisites': [],
            'roles': [],
            'steps': [],  # Array of {id, title, description, role}
            'flowchart': '',  # Mermaid syntax
            'troubleshooting': []
        }
    }
    sops.append(new_sop)
    DataStore.save_sops(sops)
    return jsonify(new_sop)
@app.route('/api/sops/<sop_id>', methods=['PUT'])
def update_sop(sop_id):
    """Merge the JSON request body into the SOP with `sop_id`.

    Returns the updated record, or 404 JSON if no record matches.
    NOTE(review): route path inferred — confirm against the frontend.
    """
    data = request.json or {}
    # Never let a client payload rewrite the record's identity or creation time.
    data.pop('id', None)
    data.pop('created_at', None)
    sops = DataStore.load_sops()
    for sop in sops:
        if sop['id'] == sop_id:
            sop.update(data)
            sop['updated_at'] = datetime.now().isoformat()
            DataStore.save_sops(sops)
            return jsonify(sop)
    return jsonify({'error': 'SOP not found'}), 404
@app.route('/api/sops/<sop_id>', methods=['DELETE'])
def delete_sop(sop_id):
    """Delete the SOP with `sop_id`.

    Idempotent by design: reports success even when no record matched.
    NOTE(review): route path inferred — confirm against the frontend.
    """
    sops = DataStore.load_sops()
    sops = [s for s in sops if s['id'] != sop_id]
    DataStore.save_sops(sops)
    return jsonify({'success': True})
@app.route('/api/sops/import', methods=['POST'])
def import_sop():
    """Import SOPs from an uploaded JSON file (a single object or a list).

    Validates size, encoding and structure; existing SOPs with a matching
    id are overwritten, new ones appended. Returns a JSON summary.
    NOTE(review): route path inferred — confirm against the frontend.
    """
    if 'file' not in request.files:
        return jsonify({'error': '未找到文件部分'}), 400
    file = request.files['file']
    if file.filename == '':
        return jsonify({'error': '未选择文件'}), 400
    if not file.filename.lower().endswith('.json'):
        return jsonify({'error': '文件类型无效。仅支持 JSON 格式。'}), 400
    try:
        # Read file content
        content = file.read()
        if len(content) > 5 * 1024 * 1024:  # 5MB limit, matches MAX_CONTENT_LENGTH
            return jsonify({'error': '文件过大 (超过 5MB)'}), 413
        # Null byte check (Binary check)
        if b'\0' in content:
            return jsonify({'error': '检测到二进制内容或空字节'}), 400
        # Parse JSON
        try:
            json_content = json.loads(content.decode('utf-8'))
        except UnicodeDecodeError:
            return jsonify({'error': '文件编码错误。需要 UTF-8 编码。'}), 400
        except json.JSONDecodeError:
            return jsonify({'error': 'JSON 格式无效'}), 400
        # Accept a single SOP object or a list of them.
        if isinstance(json_content, list):
            items = json_content
        elif isinstance(json_content, dict):
            items = [json_content]
        else:
            return jsonify({'error': '无效的 JSON 结构。必须是列表或对象。'}), 400
        valid_sops = []
        for item in items:
            # Basic validation. isinstance guard: a non-dict entry (e.g. a bare
            # number in the list) previously raised TypeError -> generic 500.
            if isinstance(item, dict) and 'title' in item and 'content' in item:
                # Ensure ID exists; generate one for records that lack it.
                if 'id' not in item:
                    item['id'] = str(uuid.uuid4())
                valid_sops.append(item)
        if not valid_sops:
            return jsonify({'error': '文件中未发现有效的 SOP 数据'}), 400
        # Save strategy: overwrite when the id already exists, else append.
        current_sops = DataStore.load_sops()
        existing_ids = {s['id']: i for i, s in enumerate(current_sops)}
        count = 0
        for sop in valid_sops:
            if sop['id'] in existing_ids:
                current_sops[existing_ids[sop['id']]] = sop
            else:
                current_sops.append(sop)
            count += 1  # count every imported SOP, overwrites included
        DataStore.save_sops(current_sops)
        return jsonify({'success': True, 'count': count, 'message': f'成功导入 {count} 个 SOP'})
    except Exception as e:
        print(f"Import error: {e}")
        return jsonify({'error': str(e)}), 500
def export_sop(sop_id):
    """Placeholder for server-side export (e.g. future PDF generation).

    The frontend currently performs all exports, so this endpoint is
    intentionally not registered as a route and does nothing.
    """
    return None
if __name__ == '__main__':
    # Debug defaults to on for local development; set FLASK_DEBUG=false
    # (any casing) to turn it off in other environments.
    flag = os.environ.get('FLASK_DEBUG', 'true')
    app.run(host='0.0.0.0', port=7860, debug=(flag.lower() == 'true'))