| |
|
|
| import hashlib |
| import sqlite3 |
| import os |
| import json |
| import uuid |
| import time |
|
|
| from datetime import datetime, timedelta, UTC |
| from werkzeug.security import generate_password_hash, check_password_hash |
| from tools.identity import generate_did |
| from tools.crypto import generate_keypair |
|
|
# Root directory (relative to the process CWD) where agent script files are stored on disk.
SCRIPTS_BASE_PATH = "scripts"
|
|
class Storage:
    """SQLite-backed persistence layer for agent data (diary, concepts, notes, scripts, users)."""

    def __init__(self, config=None):
        # Optional config dict; stored but not otherwise read in this constructor.
        self.config = config or {}
        # The DB file lives one directory above this module.
        db_path = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "agent_data.db"))
        self.db_path = db_path
        # check_same_thread=False shares one connection across threads.
        # NOTE(review): sqlite3 connections are not thread-safe by themselves —
        # confirm callers serialize access.
        self.conn = sqlite3.connect(db_path, check_same_thread=False)
        self.conn.row_factory = sqlite3.Row  # rows support both index and key access
        self._init_db()

    def _init_db(self):
        """Create/upgrade the schema by executing db_structure.sql located next to this file."""
        sql_file = os.path.join(os.path.dirname(__file__), "db_structure.sql")
        with open(sql_file, "r", encoding="utf-8") as f:
            sql_script = f.read()
        c = self.conn.cursor()
        c.executescript(sql_script)
        self.conn.commit()
|
|
| |
|
|
| def write_diary_entry(self, text, tags=None): |
| timestamp = datetime.now(UTC).isoformat() |
| tag_str = ",".join(tags) if tags else "" |
| self.conn.execute( |
| 'INSERT INTO diary_entries (text, tags, timestamp) VALUES (?, ?, ?)', |
| (text, tag_str, timestamp) |
| ) |
| self.conn.commit() |
|
|
| def read_diary_entries(self, limit=10, tag_filter=None): |
| cursor = self.conn.cursor() |
| if tag_filter: |
| if isinstance(tag_filter, list): |
| tag_filter = ",".join(tag_filter) |
| like_expr = f"%{tag_filter}%" |
| cursor.execute( |
| 'SELECT * FROM diary_entries WHERE tags LIKE ? ORDER BY id DESC LIMIT ?', |
| (like_expr, limit) |
| ) |
| else: |
| cursor.execute('SELECT * FROM diary_entries ORDER BY id DESC LIMIT ?', (limit,)) |
| return cursor.fetchall() |
|
|
| def search_diary_by_time_range(self, from_ts, to_ts): |
| cursor = self.conn.cursor() |
| cursor.execute( |
| 'SELECT * FROM diary_entries WHERE timestamp BETWEEN ? AND ? ORDER BY timestamp DESC', |
| (from_ts, to_ts) |
| ) |
| return cursor.fetchall() |
|
|
| def delete_diary_entry_by_id(self, entry_id): |
| self.conn.execute('DELETE FROM diary_entries WHERE id = ?', (entry_id,)) |
| self.conn.commit() |
|
|
| def get_diary_tag_stats(self): |
| cursor = self.conn.cursor() |
| cursor.execute('SELECT tags FROM diary_entries') |
| tag_counts = {} |
| for row in cursor.fetchall(): |
| tags = row[0].split(",") if row[0] else [] |
| for tag in tags: |
| tag = tag.strip() |
| if tag: |
| tag_counts[tag] = tag_counts.get(tag, 0) + 1 |
| return tag_counts |
|
|
| def export_diary_entries(self): |
| cursor = self.conn.cursor() |
| cursor.execute('SELECT id, text, tags, timestamp FROM diary_entries ORDER BY id ASC') |
| return cursor.fetchall() |
| |
| |
|
|
| def add_concept(self, name, description=None): |
| timestamp = datetime.now(UTC).isoformat() |
| self.conn.execute( |
| 'INSERT INTO concepts (name, description, timestamp) VALUES (?, ?, ?)', |
| (name, description, timestamp) |
| ) |
| self.conn.commit() |
|
|
| def get_concept_by_name(self, name): |
| cursor = self.conn.cursor() |
| cursor.execute('SELECT * FROM concepts WHERE name = ?', (name,)) |
| return cursor.fetchone() |
|
|
| def list_concepts(self): |
| cursor = self.conn.cursor() |
| cursor.execute('SELECT * FROM concepts ORDER BY id ASC') |
| return cursor.fetchall() |
|
|
| |
|
|
| def add_link(self, from_name, to_name, relation_type): |
| from_concept = self.get_concept_by_name(from_name) |
| to_concept = self.get_concept_by_name(to_name) |
| if not from_concept or not to_concept: |
| raise ValueError("Один или оба концепта не найдены") |
| from_id = from_concept[0] |
| to_id = to_concept[0] |
| timestamp = datetime.now(UTC).isoformat() |
| self.conn.execute( |
| 'INSERT INTO links (from_concept_id, to_concept_id, relation_type, timestamp) VALUES (?, ?, ?, ?)', |
| (from_id, to_id, relation_type, timestamp) |
| ) |
| self.conn.commit() |
|
|
| def get_links_for_concept(self, concept_name): |
| concept = self.get_concept_by_name(concept_name) |
| if not concept: |
| return [] |
| concept_id = concept[0] |
| cursor = self.conn.cursor() |
| cursor.execute( |
| 'SELECT * FROM links WHERE from_concept_id = ? OR to_concept_id = ?', |
| (concept_id, concept_id) |
| ) |
| return cursor.fetchall() |
|
|
| |
|
|
| def expand_concept_graph(self, start_id, depth): |
| visited = set() |
| results = [] |
|
|
| def dfs(node_id, level): |
| if level > depth or node_id in visited: |
| return |
| visited.add(node_id) |
| cursor = self.conn.execute( |
| 'SELECT from_concept_id, to_concept_id, relation_type FROM links WHERE from_concept_id=?', |
| (node_id,) |
| ) |
| for row in cursor.fetchall(): |
| results.append(row) |
| dfs(row[1], level + 1) |
| |
| dfs(start_id, 0) |
| return results |
|
|
| def delete_concept_by_id(self, concept_id): |
| self.conn.execute('DELETE FROM concepts WHERE id = ?', (concept_id,)) |
| self.conn.execute('DELETE FROM links WHERE from_concept_id = ? OR to_concept_id = ?', (concept_id, concept_id)) |
| self.conn.commit() |
|
|
| def delete_link_by_id(self, link_id): |
| self.conn.execute('DELETE FROM links WHERE id = ?', (link_id,)) |
| self.conn.commit() |
|
|
| def export_semantic_graph(self): |
| cursor = self.conn.cursor() |
| cursor.execute('SELECT id, name, description FROM concepts ORDER BY id ASC') |
| concepts = cursor.fetchall() |
|
|
| cursor.execute('SELECT id, from_concept_id, to_concept_id, relation_type FROM links ORDER BY id ASC') |
| links = cursor.fetchall() |
|
|
| return {"concepts": concepts, "links": links} |
|
|
| def update_concept_fields(self, concept_id, name=None, description=None): |
| cursor = self.conn.cursor() |
| if name is not None: |
| cursor.execute('UPDATE concepts SET name = ? WHERE id = ?', (name, concept_id)) |
| if description is not None: |
| cursor.execute('UPDATE concepts SET description = ? WHERE id = ?', (description, concept_id)) |
| self.conn.commit() |
|
|
| def search_links_by_relation(self, relation): |
| cursor = self.conn.cursor() |
| cursor.execute( |
| 'SELECT id, from_concept_id, to_concept_id, relation_type FROM links WHERE relation LIKE ?', |
| (f"%{relation}%",) |
| ) |
| return cursor.fetchall() |
|
|
| def search_concepts(self, query): |
| cursor = self.conn.execute( |
| '''SELECT id, name, description FROM concepts |
| WHERE name LIKE ? OR description LIKE ?''', |
| (f"%{query}%", f"%{query}%") |
| ) |
| return cursor.fetchall() |
|
|
| def merge_concepts(self, source_id, target_id): |
| cursor = self.conn.cursor() |
| cursor.execute('UPDATE links SET source_id = ? WHERE source_id = ?', (target_id, source_id)) |
| cursor.execute('UPDATE links SET target_id = ? WHERE target_id = ?', (target_id, source_id)) |
| self.delete_concept_by_id(source_id) |
| self.conn.commit() |
|
|
| def get_concept_id_by_name(self, name): |
| cursor = self.conn.execute('SELECT id FROM concepts WHERE name = ?', (name,)) |
| row = cursor.fetchone() |
| return row[0] if row else None |
| |
| |
|
|
| |
| |
|
|
| def get_notes_by_tags(self, limit=10, tag_filter=None): |
| cursor = self.conn.cursor() |
| if tag_filter: |
| if isinstance(tag_filter, list): |
| tag_filter = ",".join(tag_filter) |
| like_expr = f"%{tag_filter}%" |
| cursor.execute( |
| 'SELECT * FROM notes WHERE tags LIKE ? ORDER BY id DESC LIMIT ?', |
| (like_expr, limit) |
| ) |
| else: |
| cursor.execute('SELECT * FROM notes ORDER BY id DESC LIMIT ?', (limit,)) |
| return cursor.fetchall() |
|
|
| def get_notes_after(self, since_ts): |
| cursor = self.conn.cursor() |
| cursor.execute( |
| 'SELECT id, text, source, timestamp FROM notes WHERE timestamp > ? ORDER BY timestamp', |
| (since_ts,) |
| ) |
| return cursor.fetchall() |
|
|
| def get_first_unread_note(self): |
| cursor = self.conn.cursor() |
| cursor.execute( |
| "SELECT id, text, source, timestamp, tags FROM notes WHERE read = 0 ORDER BY id ASC LIMIT 1" |
| ) |
| return cursor.fetchone() |
|
|
| def mark_note_as_read(self, note_id: int): |
| self.conn.execute( |
| "UPDATE notes SET read = 1 WHERE id = ?", |
| (note_id,) |
| ) |
| self.conn.commit() |
|
|
| def set_tags(self, note_id: int, tags: list[str]): |
| tag_str = ",".join(tags) |
| self.conn.execute( |
| "UPDATE notes SET tags = ? WHERE id = ?", |
| (tag_str, note_id) |
| ) |
| self.conn.commit() |
|
|
| def get_random_note_by_tags(self, include_tags: list[str]): |
| cursor = self.conn.cursor() |
| like_clauses = " OR ".join(["tags LIKE ?"] * len(include_tags)) |
| values = [f"%{tag}%" for tag in include_tags] |
| query = f""" |
| SELECT id, text, source, timestamp, tags |
| FROM notes |
| WHERE ({like_clauses}) |
| ORDER BY RANDOM() |
| LIMIT 1 |
| """ |
| cursor.execute(query, values) |
| return cursor.fetchone() |
|
|
| |
|
|
| def get_llm_recent_responses(self, limit=20, llm_id=None): |
| c = self.conn.cursor() |
| query = "SELECT role, content FROM llm_recent_responses" |
| if llm_id: |
| query += " WHERE llm_id = ?" |
| query += " ORDER BY timestamp DESC LIMIT ?" |
| c.execute(query, (llm_id, limit)) |
| else: |
| query += " ORDER BY timestamp DESC LIMIT ?" |
| c.execute(query, (limit,)) |
| return c.fetchall() |
|
|
| def add_llm_memory(self, content, title=None, tags=None, llm_id=None): |
| c = self.conn.cursor() |
| c.execute(''' |
| INSERT INTO llm_memory (title, content, tags, llm_id) |
| VALUES (?, ?, ?, ?) |
| ''', (title, content, tags, llm_id)) |
| self.conn.commit() |
|
|
| def add_llm_recent_response(self, role, content, llm_id=None): |
| c = self.conn.cursor() |
| c.execute(''' |
| INSERT INTO llm_recent_responses (role, content, llm_id) |
| VALUES (?, ?, ?) |
| ''', (role, content, llm_id)) |
| self.conn.commit() |
|
|
| |
|
|
| def get_all_agent_scripts(self): |
| c = self.conn.cursor() |
| c.execute("SELECT id, name, version, language, description, tags, created_at, updated_at FROM agent_scripts") |
| return c.fetchall() |
|
|
| def get_agent_script_by_name(self, name, version=None): |
| """Возвращает скрипт с подгруженным кодом из файла, если он был сохранён через @path""" |
| c = self.conn.cursor() |
| if version: |
| c.execute("SELECT * FROM agent_scripts WHERE name = ? AND version = ?", (name, version)) |
| else: |
| c.execute(""" |
| SELECT * FROM agent_scripts |
| WHERE name = ? |
| ORDER BY updated_at DESC |
| LIMIT 1 |
| """, (name,)) |
| row = c.fetchone() |
| if not row: |
| return None |
|
|
| row = list(row) |
| code_entry = row[3] |
|
|
| if code_entry.strip().startswith("@path="): |
| rel_path = code_entry.strip().split("=", 1)[1] |
| full_path = os.path.join(SCRIPT_ROOT, rel_path) |
| if os.path.isfile(full_path): |
| with open(full_path, "r", encoding="utf-8") as f: |
| row[3] = f.read() |
| else: |
| row[3] = f"# Error: Script file not found at {full_path}" |
|
|
| return tuple(row) |
|
|
| def add_agent_script(self, name, version, code, description="", tags="", language="python", llm_id=None): |
| c = self.conn.cursor() |
| try: |
| c.execute(""" |
| INSERT INTO agent_scripts (name, version, code, description, tags, language, llm_id) |
| VALUES (?, ?, ?, ?, ?, ?, ?) |
| """, (name, version, code, description, tags, language, llm_id)) |
| self.conn.commit() |
| return True |
| except sqlite3.IntegrityError: |
| return False |
|
|
    def update_agent_script(self, name, version, code=None, description=None, tags=None):
        """Update selected fields of a script row; returns True when a row changed.

        NOTE(review): this definition is shadowed by a later method of the same
        name in this class (the mode-aware update_agent_script further down),
        so at runtime this version is dead code — confirm which one is intended
        before relying on it.
        """
        c = self.conn.cursor()
        fields = []   # "col = ?" fragments to apply
        values = []   # bound values, same order as fields

        if code is not None:
            fields.append("code = ?")
            values.append(code)
        if description is not None:
            fields.append("description = ?")
            values.append(description)
        if tags is not None:
            fields.append("tags = ?")
            values.append(tags)

        # Nothing to change: report failure instead of issuing an empty UPDATE.
        if not fields:
            return False

        fields.append("updated_at = CURRENT_TIMESTAMP")
        query = f"UPDATE agent_scripts SET {', '.join(fields)} WHERE name = ? AND version = ?"
        values.extend([name, version])

        c.execute(query, values)
        self.conn.commit()
        return c.rowcount > 0
|
|
| def delete_agent_script(self, name, version=None): |
| c = self.conn.cursor() |
| if version: |
| c.execute("DELETE FROM agent_scripts WHERE name = ? AND version = ?", (name, version)) |
| else: |
| c.execute("DELETE FROM agent_scripts WHERE name = ?", (name,)) |
| self.conn.commit() |
| return c.rowcount > 0 |
| |
| |
|
|
| def log_process_event(self, name, value=None, tags=None, status='ok', priority=0, llm_id=None): |
| c = self.conn.cursor() |
| c.execute(''' |
| INSERT INTO process_log (name, value, tags, status, priority, llm_id) |
| VALUES (?, ?, ?, ?, ?, ?) |
| ''', (name, value, tags, status, priority, llm_id)) |
| self.conn.commit() |
|
|
| def get_recent_logs(self, limit=50, status_filter=None): |
| c = self.conn.cursor() |
| query = 'SELECT * FROM process_log' |
| params = [] |
|
|
| if status_filter: |
| query += ' WHERE status = ?' |
| params.append(status_filter) |
|
|
| query += ' ORDER BY timestamp DESC LIMIT ?' |
| params.append(limit) |
|
|
| c.execute(query, tuple(params)) |
| return c.fetchall() |
|
|
| |
|
|
| def register_agent_table(self, table_name, schema, description=None, llm_id=None): |
| c = self.conn.cursor() |
| c.execute(''' |
| INSERT OR IGNORE INTO agent_tables (table_name, description, schema, llm_id) |
| VALUES (?, ?, ?, ?) |
| ''', (table_name, description, schema, llm_id)) |
| self.conn.commit() |
|
|
| def get_agent_tables(self): |
| c = self.conn.cursor() |
| c.execute('SELECT * FROM agent_tables ORDER BY created_at DESC') |
| return c.fetchall() |
|
|
| |
|
|
def delete_script_file(name, version):
    """Delete the on-disk file for script name/version, if present.

    Bug fix: the path was built from undefined SCRIPT_ROOT (NameError at
    runtime); it now uses the module constant SCRIPTS_BASE_PATH.
    """
    path = os.path.join(SCRIPTS_BASE_PATH, name, f"v{version}", "script.py")
    if os.path.isfile(path):
        os.remove(path)
| |
def resolve_script_path(name, version):
    """Build the canonical relative script location: scripts/{name}/v{version}/script.py."""
    version_dir = f"v{version}"
    return os.path.join(SCRIPTS_BASE_PATH, name, version_dir, "script.py")
|
|
| def register_agent_script(self, name, version, code, language='python', description=None, tags=None, llm_id=None): |
| c = self.conn.cursor() |
|
|
| if code.strip().startswith("@path="): |
| |
| path = code.strip().split("=", 1)[1] |
| code_entry = f"@path={path}" |
| else: |
| |
| path = resolve_script_path(name, version) |
| os.makedirs(os.path.dirname(path), exist_ok=True) |
| with open(path, "w", encoding="utf-8") as f: |
| f.write(code) |
| code_entry = f"@path={name}/v{version}/script.py" |
|
|
| c.execute(''' |
| INSERT OR REPLACE INTO agent_scripts (name, version, code, language, description, tags, llm_id) |
| VALUES (?, ?, ?, ?, ?, ?, ?) |
| ''', (name, version, code_entry, language, description, tags, llm_id)) |
| self.conn.commit() |
|
|
| def get_agent_script_code(self, name, version=None): |
| """Возвращает только код (из БД или файла)""" |
| row = self.get_agent_script_by_name(name, version) |
| if not row: |
| return None |
| code_entry = row["code_or_path"] |
| if code_entry.strip().startswith("@path="): |
| rel_path = code_entry.strip().split("=", 1)[1] |
| full_path = os.path.join(SCRIPTS_BASE_PATH, rel_path) |
| if os.path.isfile(full_path): |
| with open(full_path, "r", encoding="utf-8") as f: |
| return f.read() |
| else: |
| return f"# Error: File not found at path: {full_path}" |
| return code_entry |
|
|
| def list_agent_scripts(limit=50): |
| c = self.conn.cursor() |
| c.execute("SELECT * FROM agent_scripts ORDER BY updated_at DESC LIMIT ?", (limit,)) |
| return c.fetchall() |
|
|
| def get_latest_agent_script(self, name): |
| c = self.conn.cursor() |
| c.execute(''' |
| SELECT * FROM agent_scripts |
| WHERE name = ? |
| ORDER BY updated_at DESC |
| LIMIT 1 |
| ''', (name,)) |
| return c.fetchone() |
|
|
| def search_agent_scripts_by_tag(self, tag): |
| c = self.conn.cursor() |
| c.execute("SELECT * FROM agent_scripts WHERE tags LIKE ?", (f"%{tag}%",)) |
| return c.fetchall() |
|
|
def ensure_script_path(name, version):
    """Create scripts/{name}/v{version}/ if missing and return the script.py path inside it.

    Bug fix: the directory was built from undefined SCRIPT_ROOT (NameError at
    runtime); it now uses the module constant SCRIPTS_BASE_PATH.
    """
    path = os.path.join(SCRIPTS_BASE_PATH, name, f"v{version}")
    os.makedirs(path, exist_ok=True)
    return os.path.join(path, "script.py")
|
|
def save_script_to_file(code, name, version):
    """Write `code` to the canonical script location and return the file path."""
    target = ensure_script_path(name, version)
    with open(target, "w", encoding="utf-8") as fh:
        fh.write(code)
    return target
|
|
| def update_agent_script(self, name, version, code=None, description=None, tags=None, mode="inline"): |
| """ |
| mode: 'inline' (сохранять код в БД), 'file' (в файл, в БД — @path=...) |
| """ |
| c = self.conn.cursor() |
|
|
| |
| c.execute("SELECT code FROM agent_scripts WHERE name = ? AND version = ?", (name, version)) |
| result = c.fetchone() |
| if not result: |
| return False |
| old_code = result[0] |
|
|
| fields = [] |
| values = [] |
|
|
| |
| if code is not None: |
| old_is_file = old_code.strip().startswith("@path=") |
|
|
| if mode == "file": |
| file_path = save_script_to_file(code, name, version) |
| rel_path = os.path.relpath(file_path, SCRIPT_ROOT) |
| code_ref = f"@path={rel_path}" |
| fields.append("code = ?") |
| values.append(code_ref) |
|
|
| |
|
|
| else: |
| fields.append("code = ?") |
| values.append(code) |
|
|
| |
| if old_is_file: |
| delete_script_file(name, version) |
|
|
| if description is not None: |
| fields.append("description = ?") |
| values.append(description) |
|
|
| if tags is not None: |
| fields.append("tags = ?") |
| values.append(tags) |
|
|
| if not fields: |
| return False |
|
|
| fields.append("updated_at = ?") |
| values.append(datetime.now(UTC).isoformat()) |
|
|
| values.extend([name, version]) |
| query = f""" |
| UPDATE agent_scripts |
| SET {', '.join(fields)} |
| WHERE name = ? AND version = ? |
| """ |
|
|
| c.execute(query, values) |
| self.conn.commit() |
| return c.rowcount > 0 |
| |
| |
|
|
| def register_llm(self, llm_id, name=None, description=None): |
| c = self.conn.cursor() |
| c.execute(''' |
| INSERT OR REPLACE INTO llm_registry (id, name, description) |
| VALUES (?, ?, ?) |
| ''', (llm_id, name, description)) |
| self.conn.commit() |
|
|
| def get_registered_llms(self): |
| c = self.conn.cursor() |
| c.execute('SELECT * FROM llm_registry ORDER BY registered_at DESC') |
| return c.fetchall() |
|
|
| |
|
|
| def add_diary_relation(self, source_id, target_id, relation, strength=1.0, context=None): |
| c = self.conn.cursor() |
| c.execute(''' |
| INSERT INTO diary_graph_index (source_entry_id, target_entry_id, relation, strength, context) |
| VALUES (?, ?, ?, ?, ?) |
| ''', (source_id, target_id, relation, strength, context)) |
| self.conn.commit() |
|
|
| def get_diary_relations(self, entry_id): |
| c = self.conn.cursor() |
| c.execute(''' |
| SELECT * FROM diary_graph_index |
| WHERE source_entry_id = ? OR target_entry_id = ? |
| ORDER BY timestamp DESC |
| ''', (entry_id, entry_id)) |
| return c.fetchall() |
|
|
| |
| def set_config(self, key, value): |
| cursor = self.conn.cursor() |
| cursor.execute(''' |
| INSERT INTO config (key, value) |
| VALUES (?, ?) |
| ON CONFLICT(key) DO UPDATE SET value = excluded.value |
| ''', (key, value)) |
| self.conn.commit() |
|
|
| def add_identity(self, identity): |
| cursor = self.conn.cursor() |
| cursor.execute(''' |
| INSERT OR REPLACE INTO identity (id, name, pubkey, privkey, metadata, created_at, updated_at) |
| VALUES (?, ?, ?, ?, ?, ?, ?) |
| ''', ( |
| identity['id'], |
| identity['name'], |
| identity['pubkey'], |
| identity['privkey'], |
| identity.get('metadata', ''), |
| identity['created_at'], |
| identity['updated_at'] |
| )) |
| self.conn.commit() |
|
|
| def add_llm(self, llm): |
| cursor = self.conn.cursor() |
| config_json = json.dumps(llm, ensure_ascii=False) |
| cursor.execute(''' |
| INSERT OR REPLACE INTO llm_registry (id, name, description, config_json) |
| VALUES (?, ?, ?, ?) |
| ''', ( |
| llm['name'], |
| llm['name'], |
| llm.get('description', ''), |
| config_json |
| )) |
| self.conn.commit() |
|
|
| def clear_llm_registry(self): |
| cursor = self.conn.cursor() |
| cursor.execute('DELETE FROM llm_registry') |
| self.conn.commit() |
|
|
| def add_user(self, user): |
| cursor = self.conn.cursor() |
| cursor.execute(''' |
| INSERT OR REPLACE INTO users ( |
| username, badges, did, mail, password_hash, |
| info, contacts, language, operator, ban |
| ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?) |
| ''', ( |
| user.get('username'), |
| user.get('badges'), |
| user.get('did'), |
| user.get('mail'), |
| user.get('password_hash'), |
| user.get('info'), |
| user.get('contacts'), |
| user.get('language'), |
| int(user.get('operator', 0)), |
| user.get('ban') |
| )) |
| self.conn.commit() |
| return cursor.lastrowid |
|
|
| |
| def update_heartbeat(self, name: str): |
| now = datetime.now(UTC).isoformat() |
| self.conn.execute( |
| "INSERT INTO main_process (name, heartbeat, stop) VALUES (?, ?, 0) " |
| "ON CONFLICT(name) DO UPDATE SET heartbeat = excluded.heartbeat", |
| (name, now) |
| ) |
| self.conn.commit() |
|
|
| def check_stop_flag(self, name: str) -> bool: |
| cursor = self.conn.execute("SELECT stop FROM main_process WHERE name = ?", (name,)) |
| row = cursor.fetchone() |
| if row and row[0] == 1: |
| self.conn.execute("UPDATE main_process SET stop = 0 WHERE name = ?", (name,)) |
| self.conn.commit() |
| return True |
| return False |
|
|
| def is_process_alive(self, name: str, max_delay=180): |
| cursor = self.conn.execute("SELECT heartbeat FROM main_process WHERE name=?", (name,)) |
| row = cursor.fetchone() |
| if row: |
| try: |
| last_beat = datetime.fromisoformat(row[0]) |
| return (datetime.now(UTC) - last_beat).total_seconds() < max_delay |
| except: |
| return False |
| return False |
|
|
| |
| def get_config_value(self, key: str, default=None): |
| cursor = self.conn.execute("SELECT value FROM config WHERE key = ?", (key,)) |
| row = cursor.fetchone() |
| if not row: |
| return default |
|
|
| value = row[0] |
|
|
| |
| try: |
| parsed = json.loads(value) |
| if isinstance(parsed, (str, int, float, bool, dict, list)): |
| return parsed |
| except (json.JSONDecodeError, TypeError): |
| pass |
|
|
| |
| if isinstance(value, str) and len(value) >= 2 and value[0] == '"' and value[-1] == '"': |
| return value[1:-1] |
|
|
| return value |
|
|
| |
| def write_note(self, content, user_did="anon", source="user", hidden=0): |
| timestamp = datetime.now(UTC).isoformat() |
| self.conn.execute(""" |
| INSERT INTO notes (text, user_did, source, timestamp, hidden) |
| VALUES (?, ?, ?, ?, ?) |
| """, (content, user_did, source, timestamp, hidden)) |
| self.conn.commit() |
|
|
    def write_note_returning_id(self, content, user_did, agent_did, source="user", hidden=False, code=None, mentions="[]", hashtags="[]"):
        """Insert a note and return its freshly assigned row id.

        mentions/hashtags default to JSON-encoded empty lists ("[]").
        NOTE(review): the timestamp here is a Unix float (time.time()) while
        write_note stores an ISO-8601 string in the same column — mixed formats
        will break timestamp ordering/comparison; confirm which format readers
        expect before unifying.
        """
        cursor = self.conn.cursor()
        cursor.execute("""
            INSERT INTO notes (timestamp, text, code, mentions, hashtags, user_did, agent_did, source, hidden)
            VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
        """, (time.time(), content, code, mentions, hashtags, user_did, agent_did, source, int(hidden)))
        self.conn.commit()
        return cursor.lastrowid
|
|
| def save_attachment(self, message_id, filename, mime_type, content): |
| cursor = self.conn.cursor() |
| cursor.execute(""" |
| INSERT INTO attachments (message_id, filename, mime_type, size, binary) |
| VALUES (?, ?, ?, ?, ?) |
| """, (message_id, filename, mime_type, len(content), content)) |
| self.conn.commit() |
|
|
| def get_attachment_by_id(self, file_id): |
| cursor = self.conn.cursor() |
| cursor.execute(""" |
| SELECT id, filename, mime_type, size, binary |
| FROM attachments |
| WHERE id = ? |
| """, (file_id,)) |
| row = cursor.fetchone() |
| return dict(row) if row else None |
|
|
    def get_notes(self, limit=50, user_did="anon", only_personal=False):
        """Fetch notes visible to `user_did`, newest first, each augmented with
        lightweight attachment metadata under the "attachments" key.

        only_personal=True restricts to the caller's own hidden notes;
        otherwise the caller's notes plus public (hidden = 0) notes from
        user/llm/cli sources are returned.
        """
        cursor = self.conn.cursor()

        if only_personal:
            # Personal view: only hidden notes belonging to the caller.
            query = """
                SELECT n.id, n.text, n.code, n.source, n.user_did, u.username, n.timestamp, n.hidden
                FROM notes n
                LEFT JOIN users u ON n.user_did = u.did
                WHERE n.user_did = ? AND n.hidden = 1
                ORDER BY n.timestamp DESC
                LIMIT ?
            """
            cursor.execute(query, (user_did, limit))
        else:
            # Feed view. NOTE(review): this branch also selects u.badges while
            # the personal branch does not — confirm the asymmetry is intended.
            query = """
                SELECT n.id, n.text, n.code, n.source, n.user_did, u.username, u.badges, n.timestamp, n.hidden
                FROM notes n
                LEFT JOIN users u ON n.user_did = u.did
                WHERE n.user_did = ?
                OR ((n.source = 'user' OR n.source = 'llm' OR n.source = 'cli') AND n.hidden = 0)
                ORDER BY n.timestamp DESC
                LIMIT ?
            """
            cursor.execute(query, (user_did, limit))

        result = [dict(row) for row in cursor.fetchall()]

        # Attach metadata (no binary payloads) for each note.
        for note in result:
            note["attachments"] = self.get_attachments_for_note(note["id"])

        return result
|
|
| def get_attachments_for_note(self, message_id): |
| cursor = self.conn.cursor() |
| cursor.execute(""" |
| SELECT id, filename, mime_type, size FROM attachments |
| WHERE message_id = ? |
| """, (message_id,)) |
| return [dict(row) for row in cursor.fetchall()] |
|
|
| |
| def register_user(self, username: str, mail: str, password: str) -> bool: |
| mail = mail.lower() |
| did = generate_did() |
| try: |
| self.conn.execute( |
| "INSERT INTO users (username, mail, password_hash, did) VALUES (?, ?, ?, ?)", |
| (username, mail, generate_password_hash(password), did) |
| ) |
| self.conn.commit() |
| return True |
| except sqlite3.IntegrityError: |
| return False |
|
|
| def authenticate_user(self, mail: str, password: str) -> bool: |
| mail = mail.lower() |
| cursor = self.conn.cursor() |
| cursor.execute( |
| "SELECT password_hash FROM users WHERE mail = ?", |
| (mail,) |
| ) |
| result = cursor.fetchone() |
| if result: |
| return check_password_hash(result["password_hash"], password) |
| return False |
|
|
| def is_banned(self, user_did): |
| result = self.conn.execute(""" |
| SELECT ban |
| FROM users |
| WHERE did = ? |
| """, (user_did,)).fetchone() |
|
|
| if result and result["ban"]: |
| return datetime.fromisoformat(result["ban"]) > datetime.now(UTC) |
|
|
| return False |
|
|
| def get_user_info(self, mail: str) -> dict | None: |
| mail = mail.lower() |
| cursor = self.conn.cursor() |
| cursor.execute( |
| "SELECT username, did FROM users WHERE mail = ?", |
| (mail,) |
| ) |
| result = cursor.fetchone() |
| if result: |
| return { |
| "username": result["username"], |
| "did": result["did"] |
| } |
| return None |
|
|
| def get_user_info_by_did(self, did: str) -> dict | None: |
| cursor = self.conn.cursor() |
| cursor.execute( |
| "SELECT username, mail, operator FROM users WHERE did = ?", |
| (did,) |
| ) |
| result = cursor.fetchone() |
| if result: |
| return { |
| "username": result["username"], |
| "mail": result["mail"], |
| "operator": result["operator"] |
| } |
| return None |
|
|
| |
|
|
    def close(self):
        """Close the underlying SQLite connection; the instance is unusable afterwards."""
        self.conn.close()
|
|