# CryptoNews2 / app.py
# (Hugging Face Spaces page residue preserved as comments:
#  author "Mrttbn", last action "Update app.py", commit a31df29 verified)
import gradio as gr
import feedparser
import pandas as pd
import numpy as np
import faiss
import matplotlib.pyplot as plt
import re
import os
import json
from collections import Counter
from sentence_transformers import SentenceTransformer
from huggingface_hub import InferenceClient
import warnings
warnings.filterwarnings('ignore')
# ---------------------------------------------------------
# SETTINGS AND GLOBAL VARIABLES
# ---------------------------------------------------------
# Provide the HF token via Spaces Secrets / Environment:
# HF_TOKEN = os.getenv("HF_TOKEN")
HF_TOKEN = os.getenv("HF_TOKEN")  # may be overridden at runtime from the UI textbox
# Llama-3 model served through the HF Serverless Inference API
LLM_MODEL_ID = "meta-llama/Meta-Llama-3-8B-Instruct"
# Global mutable state shared across Gradio callbacks
embedding_model = None  # SentenceTransformer, created by initialize_models()
llm_client = None  # huggingface_hub InferenceClient, created by initialize_models()
df = None  # pandas DataFrame of fetched news, populated by fetch_news()
index = None  # FAISS L2 index over title embeddings, built by fetch_news()
embeddings = None  # numpy array of title embeddings, built by fetch_news()
# ---------------------------------------------------------
# YARDIMCI FONKSİYONLAR
# ---------------------------------------------------------
def _extract_json_from_text(output_text: str):
"""LLM çıktısından JSON objesini yakala."""
if not output_text:
return None
# Markdown code block temizliği
cleaned = output_text.replace("```json", "").replace("```", "").strip()
m = re.search(r"\{.*\}", cleaned, re.DOTALL)
if not m:
return None
try:
return json.loads(m.group())
except Exception:
return None
def get_llama_sentiment(text: str, client: InferenceClient):
    """Score a news title's sentiment with Llama-3 via chat completion.

    Args:
        text: the news headline to classify.
        client: a ready InferenceClient bound to a valid HF token.

    Returns:
        (label, score) where label is one of "positive"/"negative"/"neutral"
        and score is clamped to [0.0, 1.0]. Falls back to ("neutral", 0.5)
        on API errors or unparseable model output.
    """
    system_prompt = (
        "You are a crypto sentiment analysis expert. Analyze the news title.\n"
        "You MUST return a valid JSON object. Do NOT write any introduction or explanation.\n\n"
        'Format:\n{"label": "positive", "score": 0.9}\n\n'
        'Labels can be: "positive", "negative", "neutral".\n'
        "Score is between 0.0 and 1.0."
    )
    user_prompt = f"News Title: {text}"
    fallback = ("neutral", 0.5)
    try:
        chat = client.chat.completions.create(
            model=LLM_MODEL_ID,
            messages=[
                {"role": "system", "content": system_prompt},
                {"role": "user", "content": user_prompt},
            ],
            max_tokens=120,
            temperature=0.1,
        )
        output_text = (chat.choices[0].message.content or "").strip()
        # Shorten long titles so the log line stays readable.
        if text and len(text) > 20:
            preview = text[:20] + "..."
        else:
            preview = text or ""
        print(f"Model Yanıtı ({preview}): {output_text}")
        parsed = _extract_json_from_text(output_text)
        if not parsed:
            print(f"⚠️ JSON Bulunamadı. Gelen ham veri: {output_text}")
            return fallback
        label = str(parsed.get("label", "neutral")).lower().strip()
        score = float(parsed.get("score", 0.5))
        # Guardrails: force unknown labels to neutral, clamp score to [0, 1].
        if label not in {"positive", "negative", "neutral"}:
            label = "neutral"
        return label, max(0.0, min(1.0, score))
    except Exception as e:
        print(f"❌ API/Bağlantı Hatası: {str(e)}")
        return fallback
# ---------------------------------------------------------
# ANA FONKSİYONLAR
# ---------------------------------------------------------
def initialize_models(token_input):
    """Create the embedding model and the HF inference client (lazy, once).

    Args:
        token_input: token typed into the UI; overrides the env/secret token
            when non-empty.

    Returns:
        A status string for the UI describing success or the failure cause.
    """
    global embedding_model, llm_client, HF_TOKEN
    # A token entered in the UI takes precedence over the env/secret one.
    manual_token = token_input.strip() if token_input else ""
    if manual_token:
        HF_TOKEN = manual_token
    if not HF_TOKEN:
        return "❌ Hata: Hugging Face Token yok. (Space Secrets'e HF_TOKEN ekle veya buradan gir)"
    try:
        # Lazy singletons: skip re-loading when already initialized.
        if embedding_model is None:
            embedding_model = SentenceTransformer("all-MiniLM-L6-v2")
        if llm_client is None:
            llm_client = InferenceClient(token=HF_TOKEN)
        return f"✅ Hazır: Embedding + Llama-3 Client ({LLM_MODEL_ID})"
    except Exception as e:
        return f"❌ Model/Client başlatma hatası: {str(e)}"
def fetch_news():
    """Fetch headlines from the crypto RSS feeds and analyze them with Llama-3.

    The first 5 entries per feed are collected, deduplicated by title, scored
    for sentiment, and embedded into a FAISS L2 index for semantic search.
    Results are stored in the module-level globals (df, index, embeddings).

    Returns:
        (status_text, preview_df): a human-readable log plus the first 10
        analyzed rows, or (warning_text, None) when prerequisites or data
        are missing.
    """
    global df, index, embeddings, llm_client, embedding_model
    if llm_client is None or embedding_model is None:
        return "⚠️ Önce 'Bağlantıyı Kur' ile modelleri başlat!", None
    RSS_URLS = [
        "https://cointelegraph.com/rss",
        "https://cryptonews.com/news/feed",
        "https://www.coindesk.com/arc/outboundfeeds/rss/",
    ]
    all_entries = []
    status_messages = []
    for url in RSS_URLS:
        try:
            feed = feedparser.parse(url)
            # Only 5 entries per feed to keep the demo fast.
            for entry in feed.entries[:5]:
                all_entries.append(
                    {
                        "title": entry.get("title", ""),
                        "link": entry.get("link", ""),
                        "published": entry.get("published", ""),
                    }
                )
            status_messages.append(f"✓ {url.split('/')[2]} okundu.")
        except Exception:
            status_messages.append(f"✗ {url} hatası.")
    # BUGFIX: when every feed fails, pd.DataFrame([]) has no "title" column,
    # so drop_duplicates(subset="title") raises KeyError — bail out before
    # building the frame instead of crashing.
    if not all_entries:
        return "Haber bulunamadı.", None
    df = pd.DataFrame(all_entries).drop_duplicates(subset="title").reset_index(drop=True)
    if len(df) == 0:
        return "Haber bulunamadı.", None
    status_messages.append("\n🤖 Llama-3 ile analiz yapılıyor (bekleyin)...")
    labels = []
    scores = []
    # One LLM call per headline (sequential; acceptable at demo scale).
    for title in df["title"].tolist():
        lbl, scr = get_llama_sentiment(title, llm_client)
        labels.append(lbl)
        scores.append(scr)
    df["sentiment_label"] = labels
    df["sentiment_score"] = scores
    # Build the FAISS search index over title embeddings (L2 distance).
    corpus = df["title"].tolist()
    embeddings = embedding_model.encode(corpus, show_progress_bar=False)
    dim = embeddings.shape[1]
    index = faiss.IndexFlatL2(dim)
    index.add(embeddings.astype("float32"))  # faiss requires float32 input
    final_msg = "\n".join(status_messages) + f"\n\n✅ {len(df)} haber analiz edildi."
    return final_msg, df[["title", "sentiment_label", "sentiment_score"]].head(10)
def search_similar_news(query, top_k=3):
    """Semantic search over the collected headlines via the FAISS index.

    Args:
        query: free-text search string.
        top_k: maximum number of hits to return (capped at corpus size).

    Returns:
        (message, results_df) on success, or (warning/error message, None).
    """
    global df, index, embedding_model
    if df is None or index is None or embedding_model is None:
        return "⚠️ Önce haberleri toplayın!", None
    try:
        query_vec = embedding_model.encode([query], show_progress_bar=False)
        k = min(top_k, len(df))
        _, hit_ids = index.search(query_vec.astype("float32"), k=k)
        results = [
            {
                "Başlık": df.iloc[int(hit)]["title"],
                "Llama-3 Görüşü": df.iloc[int(hit)]["sentiment_label"],
                "Güven Skoru": float(df.iloc[int(hit)]["sentiment_score"]),
                "Link": df.iloc[int(hit)]["link"],
            }
            for hit in hit_ids[0]
        ]
        return f"🔎 '{query}' için sonuçlar:", pd.DataFrame(results)
    except Exception as e:
        return f"Hata: {str(e)}", None
def analyze_coin_sentiment(coin_name):
    """Filter collected news by coin name and chart its sentiment mix.

    Args:
        coin_name: substring matched case-insensitively against titles.

    Returns:
        (markdown_report, matplotlib_figure, filtered_df), or
        (warning, None, None) when there is no data / no match.
    """
    global df
    if df is None:
        return "⚠️ Veri yok!", None, None
    # Case-insensitive substring match; NaN titles treated as non-matching.
    subset = df[df["title"].str.contains(coin_name, case=False, na=False)]
    if len(subset) == 0:
        return f"⚠️ '{coin_name}' hakkında haber yok.", None, None
    dist = subset["sentiment_label"].value_counts()
    color_map = {"positive": "#2ecc71", "negative": "#e74c3c", "neutral": "#95a5a6"}
    bar_colors = [color_map.get(lbl, "#333") for lbl in dist.index]
    # Side-by-side bar chart and pie chart of the label distribution.
    fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(10, 4))
    ax1.bar(dist.index, dist.values, color=bar_colors)
    ax1.set_title(f"{coin_name} Sentiment (Llama-3)")
    ax2.pie(dist.values, labels=dist.index, autopct="%1.1f%%", colors=bar_colors)
    plt.tight_layout()
    avg_score = float(subset["sentiment_score"].mean())
    report = f"""
### 🤖 Llama-3 Analiz Raporu: {coin_name.upper()}
- **Toplam Haber:** {len(subset)}
- **Ortalama Güven Skoru:** {avg_score:.2f}
- **Baskın Duygu:** {dist.idxmax().upper() if not dist.empty else 'N/A'}
"""
    return report, fig, subset[["title", "sentiment_label", "sentiment_score", "link"]]
def create_overview_chart():
    """Bar chart of sentiment label counts across all collected news.

    Returns:
        A matplotlib figure, or None when no news has been fetched yet.
    """
    global df
    if df is None:
        return None
    tally = df["sentiment_label"].value_counts()
    palette = {"positive": "green", "negative": "red", "neutral": "gray"}
    bar_colors = [palette.get(lbl, "gray") for lbl in tally.index]
    fig, ax = plt.subplots(figsize=(8, 5))
    ax.bar(tally.index, tally.values, color=bar_colors)
    ax.set_title("Genel Piyasa Duygu Durumu (Llama-3 Analizi)")
    return fig
# ---------------------------------------------------------
# GRADIO ARAYÜZÜ
# ---------------------------------------------------------
# Gradio UI: four tabs — setup/fetch, per-coin analysis, semantic search, overview.
with gr.Blocks(theme=gr.themes.Soft(), title="Crypto News AI (Llama-3)") as app:
    gr.Markdown("# 🦙 Kripto Haber Analizi (Llama-3 Destekli)")
    gr.Markdown("Bu uygulama, duygu analizi için **Meta-Llama-3-8B-Instruct** kullanır (HF Serverless API).")
    # Tab 1: enter token, initialize models, then fetch + analyze the feeds.
    with gr.Tab("⚙️ Ayarlar & Başlat"):
        hf_token_input = gr.Textbox(
            label="Hugging Face Token (Gerekli)",
            type="password",
            placeholder="hf_xxxxx (ister Secrets->HF_TOKEN olarak da koyabilirsin)",
        )
        init_btn = gr.Button("🚀 Bağlantıyı Kur", variant="primary")
        init_out = gr.Textbox(label="Sistem Durumu")
        gr.Markdown("---")
        fetch_btn = gr.Button("📰 Haberleri Çek ve Llama-3'e Sor", variant="secondary")
        fetch_out = gr.Textbox(label="Log", lines=8)
        fetch_table = gr.Dataframe(label="Analiz Sonuçları")
        init_btn.click(initialize_models, inputs=[hf_token_input], outputs=[init_out])
        # fetch_news takes no inputs; it reads the module-level globals.
        fetch_btn.click(fetch_news, outputs=[fetch_out, fetch_table])
    # Tab 2: per-coin sentiment report, charts and matching rows.
    with gr.Tab("📊 Coin Analizi"):
        coin_in = gr.Textbox(label="Coin İsmi (örn: Bitcoin)")
        coin_btn = gr.Button("Analiz Et")
        coin_report = gr.Markdown()
        coin_plot = gr.Plot()
        coin_data = gr.Dataframe()
        coin_btn.click(analyze_coin_sentiment, inputs=[coin_in], outputs=[coin_report, coin_plot, coin_data])
    # Tab 3: FAISS-backed semantic search over collected titles.
    with gr.Tab("🔎 Arama"):
        search_in = gr.Textbox(label="Ne aramıştınız?")
        search_btn = gr.Button("Bul")
        search_res_txt = gr.Textbox(label="Sonuç")
        search_res_df = gr.Dataframe()
        search_btn.click(search_similar_news, inputs=[search_in], outputs=[search_res_txt, search_res_df])
    # Tab 4: aggregate sentiment distribution chart.
    with gr.Tab("📈 Genel Bakış"):
        overview_btn = gr.Button("Grafiği Güncelle")
        overview_plot = gr.Plot()
        overview_btn.click(create_overview_chart, outputs=[overview_plot])
# Launch only when run as a script (Spaces imports and serves `app`).
if __name__ == "__main__":
    app.launch()