# LATE.IO2 / backend / digs_engine / run_digs_daemon.py
# NOTE(review): the following lines were GitHub page text accidentally pasted
# into the source (author "AIEONE", commit message "Initial commit syncing
# local server with Hugging Face Space", commit 490ec84). Commented out so
# the module parses.
#!/usr/bin/env python3
import csv
import os
import time
from datetime import datetime, timezone

from digest_segment import process_segment
from export_to_spreadsheet import export_to_csv
# Directory watched for incoming .txt segment files.
SEGMENT_PATH = os.path.join(os.path.dirname(__file__), "digest_paths")
# CSV file that accumulates one row per processed segment.
CSV_PATH = os.path.join(os.path.dirname(__file__), "digs_results.csv")
# Append-only log of filenames already digested (survives restarts).
PROCESSED_LOG = os.path.join(os.path.dirname(__file__), ".digested_files.log")
def load_processed():
    """Return the set of filenames already digested, read from PROCESSED_LOG.

    Returns an empty set when the log does not exist yet (first run).
    Blank lines are skipped so a trailing newline in the log does not
    introduce a spurious "" entry.
    """
    if not os.path.exists(PROCESSED_LOG):
        return set()
    # encoding is pinned for consistency with the CSV writer in append_to_csv
    with open(PROCESSED_LOG, 'r', encoding='utf-8') as f:
        return {line.strip() for line in f if line.strip()}
def save_processed(filename):
    """Append *filename* to the processed-files log (one name per line).

    The log is append-only; encoding is pinned to utf-8 for consistency
    with load_processed and the CSV writer.
    """
    with open(PROCESSED_LOG, 'a', encoding='utf-8') as f:
        f.write(filename + "\n")
def append_to_csv(result):
    """Append one digest result as a row of CSV_PATH, adding a UTC timestamp.

    Parameters:
        result: mapping expected to carry the "filename", "char_count" and
            "processed_paths" keys (see the fieldnames below); merged into
            the row alongside a generated "timestamp".

    Writes the header only when the file does not exist yet.
    """
    write_header = not os.path.exists(CSV_PATH)
    with open(CSV_PATH, 'a', newline='', encoding='utf-8') as csvfile:
        writer = csv.DictWriter(
            csvfile,
            fieldnames=["timestamp", "filename", "char_count", "processed_paths"],
        )
        if write_header:
            writer.writeheader()
        # datetime.utcnow() is deprecated (3.12+) and returns a naive datetime;
        # use an aware UTC timestamp instead.
        writer.writerow({
            "timestamp": datetime.now(timezone.utc).isoformat(),
            **result
        })
def watch_and_digs():
    """Poll SEGMENT_PATH every 2 seconds and digest any new .txt file.

    For each unseen .txt file: run process_segment, append the result to the
    CSV, and record the filename both on disk (save_processed) and in the
    in-memory `seen` set. Runs forever; errors on individual files are
    printed and the loop continues.
    """
    print("DIGS Daemon running. Watching for new .txt files...")
    seen = load_processed()
    while True:
        # sorted() makes the processing order deterministic across platforms.
        for file in sorted(os.listdir(SEGMENT_PATH)):
            if not file.endswith(".txt") or file in seen:
                continue
            full_path = os.path.join(SEGMENT_PATH, file)
            print(f"Processing new file: {file}")
            try:
                result = process_segment(full_path)
                if result:
                    append_to_csv(result)
                    save_processed(file)
                    # Bug fix: without this, `seen` (loaded once at startup)
                    # never learns about the file, so it was re-processed and
                    # re-appended to the CSV on every 2-second pass.
                    seen.add(file)
                    print(f"Done: {file}")
            except Exception as e:
                # Best-effort daemon: report and keep watching. Note a file
                # that keeps failing will be retried on every pass.
                print(f"Error processing {file}: {e}")
        time.sleep(2)
if __name__ == "__main__":
    # Script entry point: run the watcher loop forever.
    watch_and_digs()