Pavaas committed on
Commit
d7e6b6a
·
verified ·
1 Parent(s): 3e833dd

Update config.py

Browse files
Files changed (1) hide show
  1. config.py +12 -6
config.py CHANGED
@@ -1,4 +1,4 @@
1
- import fitz # PyMuPDF
2
  import easyocr
3
  import whisper
4
  import tempfile
@@ -11,6 +11,8 @@ import csv
11
  from transformers import pipeline
12
  import streamlit as st
13
 
 
 
14
  def process_pdf(path):
15
  text = ""
16
  try:
@@ -67,13 +69,15 @@ def process_youtube(url):
67
  ydl.download([url])
68
  return process_audio(audio_path)
69
 
 
 
70
  def load_llm_swarm():
71
  return {
72
- "fast": pipeline("text2text-generation", model="google/flan-t5-small", max_length=64),
73
- "bio": pipeline("text2text-generation", model="microsoft/BioGPT-Large", tokenizer="microsoft/BioGPT-Large"),
74
- "deep": pipeline("text2text-generation", model="tiiuae/falcon-7b-instruct"),
75
- "mistral": pipeline("text2text-generation", model="mistralai/Mistral-7B-Instruct"),
76
- "fallback": pipeline("text2text-generation", model="MBZUAI/LaMini-Flan-T5-783M")
77
  }
78
 
79
  llm_swarm = load_llm_swarm()
@@ -123,6 +127,8 @@ def generate_flashcards(text, types=["Q&A"], max_cards=100):
123
 
124
  return cards
125
 
 
 
126
  def export_to_csv(cards, filename="batanki_cards.csv"):
127
  with open(filename, "w", newline="", encoding="utf-8") as f:
128
  writer = csv.writer(f)
 
1
+ import fitz
2
  import easyocr
3
  import whisper
4
  import tempfile
 
11
  from transformers import pipeline
12
  import streamlit as st
13
 
14
+ # === Input Processing ===
15
+
16
  def process_pdf(path):
17
  text = ""
18
  try:
 
69
  ydl.download([url])
70
  return process_audio(audio_path)
71
 
72
+ # === AI Swarm (CPU-Optimized) ===
73
+
74
  def load_llm_swarm():
75
  return {
76
+ "fast": pipeline("text2text-generation", model="google/flan-t5-small"),
77
+ "bio": pipeline("text2text-generation", model="mrm8488/t5-base-finetuned-question-generation-ap"),
78
+ "deep": pipeline("text2text-generation", model="google/flan-t5-base"),
79
+ "mistral": pipeline("text2text-generation", model="google/flan-t5-large"),
80
+ "fallback": pipeline("text2text-generation", model="MBZUAI/LaMini-Flan-T5-248M")
81
  }
82
 
83
  llm_swarm = load_llm_swarm()
 
127
 
128
  return cards
129
 
130
+ # === Export Functions ===
131
+
132
  def export_to_csv(cards, filename="batanki_cards.csv"):
133
  with open(filename, "w", newline="", encoding="utf-8") as f:
134
  writer = csv.writer(f)