Fetii committed on
Commit
fbec1e4
·
verified ·
1 Parent(s): b4ef132

Upload 3 files

Browse files
Files changed (3) hide show
  1. app.py +99 -0
  2. backend.py +214 -0
  3. requierements.txt +7 -0
app.py ADDED
@@ -0,0 +1,99 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # ==========================================
2
+ # FinTalk - Gradio UI
3
+ # ==========================================
4
+ import gradio as gr
5
+ import asyncio
6
+ from backend import fintalk_discussion, export_to_pdf, generate_tts_files # senin fonksiyonların
7
+
8
+ # -----------------------------------------------------
9
+ # Tartışmayı başlatan ana fonksiyon
10
+ # -----------------------------------------------------
11
def run_fintalk(topic_text):
    """Run the full FinTalk pipeline for one topic.

    Parameters
    ----------
    topic_text : str
        Economic headline or news snippet typed by the user.

    Returns
    -------
    tuple
        Exactly 10 values, one per Gradio output component: five
        Markdown strings (moderator intro, bullish view, bearish view,
        moderator wrap-up, GPT summary), the PDF report path, and four
        mp3 file paths.
    """
    # Reject empty or too-short topics. BUG FIX: this early return must
    # have the same arity as the success path (10 values, one per output
    # component); the original returned only 7, which breaks Gradio's
    # output mapping on invalid input.
    if not topic_text or len(topic_text.strip()) < 10:
        return ("Please provide a valid economic topic.",
                "", "", "", "", None, None, None, None, None)

    result = fintalk_discussion(topic_text)
    result["topic"] = topic_text

    # Build the downloadable PDF report.
    pdf_path = "FinTalk_Report.pdf"
    export_to_pdf(result, pdf_path)

    # TTS is best-effort: a synthesis failure should not kill the run.
    try:
        asyncio.run(generate_tts_files(result))
    except Exception as e:
        print("TTS hatası:", e)

    return (
        f"🧩 **Moderator:**\n{result['moderator_intro']}",
        f"💹 **Bullish Investor:**\n{result['bullish_view']}",
        f"📉 **Bearish Economist:**\n{result['bearish_view']}",
        f"🎙️ **Moderator Wrap-up:**\n{result['moderator_wrap']}",
        f"📊 **GPT Summary:**\n{result['summary']}",
        pdf_path,
        "moderator_intro.mp3",
        "bullish_view.mp3",
        "bearish_view.mp3",
        "moderator_wrap.mp3"
    )
40
+
41
# -----------------------------------------------------
# Gradio interface layout
# -----------------------------------------------------
with gr.Blocks(theme=gr.themes.Soft()) as demo:
    # Header / instructions shown above the controls.
    gr.Markdown(
        """
        # 📊 **FinTalk — AI Economic Roundtable**
        Simulate a live discussion between two economists with opposing views, moderated by Selin.
        Enter an economic topic or news headline below and click **Start Discussion**.
        """
    )

    with gr.Row():
        # Left column: topic entry plus the trigger button.
        with gr.Column(scale=1):
            topic_box = gr.Textbox(
                label="Enter an Economic Headline or News",
                placeholder="Example: The central bank raised interest rates by 200 basis points.",
                lines=4,
            )
            launch_button = gr.Button("🚀 Start Discussion")

        # Right column: the four speaker turns plus the GPT summary.
        with gr.Column(scale=2):
            intro_md = gr.Markdown(label="Moderator Intro")
            bull_md = gr.Markdown(label="Bullish Investor")
            bear_md = gr.Markdown(label="Bearish Economist")
            wrap_md = gr.Markdown(label="Moderator Wrap-up")
            summary_md = gr.Markdown(label="GPT Summary")

    report_file = gr.File(label="📄 Download PDF Report")

    with gr.Row():
        intro_voice = gr.Audio(label="Moderator Voice", interactive=False)
        bull_voice = gr.Audio(label="Bullish Voice", interactive=False)
        bear_voice = gr.Audio(label="Bearish Voice", interactive=False)
        wrap_voice = gr.Audio(label="Moderator Wrap-up", interactive=False)

    # Wire the button to the backend pipeline; this order must match the
    # tuple returned by run_fintalk.
    all_outputs = [
        intro_md,
        bull_md,
        bear_md,
        wrap_md,
        summary_md,
        report_file,
        intro_voice,
        bull_voice,
        bear_voice,
        wrap_voice,
    ]
    launch_button.click(fn=run_fintalk, inputs=topic_box, outputs=all_outputs)


# -----------------------------------------------------
# Application entry point
# -----------------------------------------------------
if __name__ == "__main__":
    demo.launch()
backend.py ADDED
@@ -0,0 +1,214 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# ==========================================
# FinTalk - Backend (llama-cpp + GPT summary)
# ==========================================

import os
import re
from typing import Dict
from dotenv import load_dotenv
from llama_cpp import Llama
from openai import OpenAI
import edge_tts
import asyncio
import reportlab

load_dotenv()

# -----------------------------------------------------
# 1) GGUF MODEL PATH
#    IMPROVEMENT: overridable via the MODEL_PATH environment variable
#    so the backend is portable; the original hard-coded local path is
#    kept as the backward-compatible default.
# -----------------------------------------------------
model_path = os.getenv(
    "MODEL_PATH",
    r"C:/Users/cagri/.lmstudio/models/QuantFactory/Llama-3-8B-Instruct-Finance-RAG-GGUF/Llama-3-8B-Instruct-Finance-RAG.Q4_K_S.gguf",
)

# -----------------------------------------------------
# 2) LOAD THE LLAMA-CPP MODEL
#    n_ctx=4096 is sufficient; 8192 would raise RAM usage. A context
#    warning at load time can be ignored.
# -----------------------------------------------------
llm = Llama(
    model_path=model_path,
    n_ctx=4096,
    n_threads=6,
    n_batch=512,
    verbose=False,
)

# -----------------------------------------------------
# 3) OPENAI CLIENT (used only for the final summary)
# -----------------------------------------------------
OPENAI_API_KEY = os.getenv("API_KEY")
if not OPENAI_API_KEY:
    raise RuntimeError("API_KEY bulunamadı. Lütfen .env veya sistem değişkenlerine ekleyin.")
client = OpenAI(api_key=OPENAI_API_KEY)
SUMMARY_MODEL = os.getenv("SUMMARY_MODEL", "gpt-4o-mini")

# -----------------------------------------------------
# 4) PERSONA PROMPTS (kept deliberately short)
# -----------------------------------------------------
SYSTEM_MODERATOR = (
    "You are Selin, the moderator of an economics roundtable. "
    "Be neutral, brief, and structured. Guide the flow without giving opinions."
)

SYSTEM_BULLISH = (
    """You are Bullish Investor, an optimistic economist who focuses on growth, market confidence, and positive catalysts.
Be analytical and persuasive. Mention at least two concrete macro or market factors that support your optimism
(e.g., improved investor sentiment, fiscal stimulus, or sector resilience).
Respond in 2–3 detailed paragraphs and conclude with one confident takeaway."""
)

SYSTEM_BEARISH = (
    "You are Bearish Economist, a cautious macroeconomist who highlights downside risks "
    "(inflation persistence, liquidity stress, policy uncertainty). Be analytical; end with one cautionary insight."
)
61
+
62
+ # -----------------------------------------------------
63
+ # 5) YARDIMCI: Post-process (meta notları, personayı ifşa eden satırları temizle)
64
+ # -----------------------------------------------------
65
+ _META_PATTERNS = [
66
+ r"(?i)\bnote:\b.*", # "Note:" ile başlayan meta
67
+ r"(?i)\bi am (selin|bullish|bearish).*$", # "I am ..." persona ifşaları
68
+ r"(?i)\bthis response was written\b.*",
69
+ r"(?i)\bplease review\b.*",
70
+ r"(?i)\bclarity and readability\b.*",
71
+ ]
72
+ def _clean(text: str) -> str:
73
+ cleaned = text.strip()
74
+ for pat in _META_PATTERNS:
75
+ cleaned = re.sub(pat, "", cleaned, flags=re.MULTILINE)
76
+ # aşırı boşlukları toparla
77
+ cleaned = re.sub(r"\n{3,}", "\n\n", cleaned).strip()
78
+ return cleaned
79
+
80
# -----------------------------------------------------
# 6) SINGLE PERSONA REPLY (chat completion with a fresh context)
# -----------------------------------------------------
def generate_as(system_prompt: str, user_text: str, max_tokens: int = 480, temperature: float = 0.7) -> str:
    """Ask the local llama-cpp model for one persona reply.

    Every call starts from a clean context: the model is reset first so
    earlier turns cannot leak through the KV cache, then a two-message
    chat completion is run and the raw text is post-processed before
    being returned.
    """
    # Reset to minimise any KV-cache carry-over between calls.
    llm.reset()
    completion = llm.create_chat_completion(
        messages=[
            {"role": "system", "content": system_prompt},
            {"role": "user", "content": user_text},
        ],
        max_tokens=max_tokens,
        temperature=temperature,
        top_p=0.9,
        repeat_penalty=1.1,
    )
    raw_reply = completion["choices"][0]["message"]["content"]
    return _clean(raw_reply)
101
+
102
# -----------------------------------------------------
# 7) DISCUSSION FLOW
# -----------------------------------------------------
def fintalk_discussion(news_text: str) -> Dict[str, str]:
    """Run the four-turn roundtable about *news_text* and summarize it.

    Turn order: moderator intro -> bullish view -> bearish rebuttal ->
    moderator wrap-up, each generated locally via generate_as; the full
    transcript is then condensed with the OpenAI chat API.

    Returns a dict with keys: moderator_intro, bullish_view,
    bearish_view, moderator_wrap, summary.
    """
    print("🧩 FinTalk simulation started...\n")

    # Running transcript of every turn (single shared context for the summary).
    messages = []

    # 1️⃣ Selin opens the panel.
    selin_intro = generate_as(SYSTEM_MODERATOR, f"Open the discussion about: {news_text}.")
    messages.append(f"Selin: {selin_intro}")
    print("Moderator Intro:\n", selin_intro, "\n")

    # 2️⃣ The bullish investor speaks first.
    bullish_view = generate_as(
        SYSTEM_BULLISH,
        f"The moderator introduced the topic: {news_text}. Respond with your opening bullish perspective."
    )
    messages.append(f"Bullish Investor: {bullish_view}")
    print("Bullish Investor:\n", bullish_view, "\n")

    # 3️⃣ The bearish economist rebuts (sees the bullish argument).
    bearish_view = generate_as(
        SYSTEM_BEARISH,
        f"The moderator introduced the topic: {news_text}. "
        f"The bullish economist said: {bullish_view}\n"
        "Now respond with your cautious analysis."
    )
    messages.append(f"Bearish Economist: {bearish_view}")
    print("Bearish Economist:\n", bearish_view, "\n")

    # 4️⃣ Selin wraps up, referencing the original topic.
    selin_wrap = generate_as(
        SYSTEM_MODERATOR,
        f"Based on the debate about {news_text}, summarize their main differences and close the panel politely."
    )
    messages.append(f"Selin: {selin_wrap}")
    print("Moderator Wrap-up:\n", selin_wrap, "\n")

    # 5️⃣ GPT summarization of the whole transcript.
    debate_text = "\n".join(messages)
    summary_prompt = (
        "Summarize this debate between a bullish and a bearish economist in 5 bullet points. "
        "Keep it grounded in the topic and add a balanced conclusion.\n\n"
        f"{debate_text}"
    )

    summary_resp = client.chat.completions.create(
        model=SUMMARY_MODEL,
        messages=[
            {"role": "system", "content": "You are an expert economic summarizer."},
            {"role": "user", "content": summary_prompt}
        ]
    )
    final_summary = summary_resp.choices[0].message.content.strip()
    print("📊 GPT Summary:\n", final_summary)

    return {
        "moderator_intro": selin_intro,
        "bullish_view": bullish_view,
        "bearish_view": bearish_view,
        "moderator_wrap": selin_wrap,
        "summary": final_summary
    }
167
+
168
def export_to_pdf(result: dict, filename="FinTalk_Report.pdf"):
    """Render the discussion dict into a simple single-column A4 PDF.

    Sections appear in panel order (topic, intro, bullish, bearish,
    wrap-up, summary) followed by a footer line.
    """
    # reportlab is imported lazily so merely importing this module does
    # not require it.
    from reportlab.lib.pagesizes import A4
    from reportlab.platypus import SimpleDocTemplate, Paragraph, Spacer
    from reportlab.lib.styles import getSampleStyleSheet

    styles = getSampleStyleSheet()
    doc = SimpleDocTemplate(filename, pagesize=A4)
    story = []

    def add(title, text):
        # One heading, one body paragraph, and a spacer per section.
        story.append(Paragraph(f"<b>{title}</b>", styles["Heading3"]))
        story.append(Paragraph(text.replace("\n", "<br/>"), styles["BodyText"]))
        story.append(Spacer(1, 12))

    sections = [
        ("Topic", result.get("topic", "—")),
        ("Moderator Intro", result["moderator_intro"]),
        ("Bullish Investor", result["bullish_view"]),
        ("Bearish Economist", result["bearish_view"]),
        ("Moderator Wrap-up", result["moderator_wrap"]),
        ("GPT-4 Summary", result["summary"]),
    ]
    for title, text in sections:
        add(title, text)

    story.append(Paragraph("<i>Generated by FinTalk – AI Economic Roundtable</i>", styles["Normal"]))
    doc.build(story)
191
+
192
async def generate_tts_files(result):
    """Synthesize one mp3 per discussion section with edge-tts.

    Writes moderator_intro.mp3, bullish_view.mp3, bearish_view.mp3 and
    moderator_wrap.mp3 into the current working directory. The file
    stems double as keys into *result*.
    """
    # Voice per section; the moderator keeps one voice for intro and wrap-up.
    voices = {
        "moderator_intro": "en-US-AriaNeural",
        "bullish_view": "en-US-GuyNeural",
        "bearish_view": "en-GB-RyanNeural",
        "moderator_wrap": "en-US-AriaNeural"
    }
    for key, voice in voices.items():
        filename = f"{key}.mp3"
        text = result[key]
        await edge_tts.Communicate(text, voice=voice, rate="+0%").save(filename)
        # BUG FIX: the progress message was an f-string with no
        # placeholder, so the generated file name was never shown.
        print(f"✅ {filename} oluşturuldu")
204
+
205
# -----------------------------------------------------
# 8) QUICK MANUAL TEST (run the backend directly from a terminal)
# -----------------------------------------------------
if __name__ == "__main__":
    topic = input("What's discussion topic ?\n>")
    discussion = fintalk_discussion(topic)
    discussion["topic"] = topic
    export_to_pdf(discussion)
    asyncio.run(generate_tts_files(discussion))
214
+
requierements.txt ADDED
@@ -0,0 +1,7 @@
 
 
 
 
 
 
 
 
1
+ gradio
2
+ llama-cpp-python==0.2.85
3
+ reportlab
4
+ openai
5
+ edge-tts
6
+ python-dotenv
7
+ huggingface_hub