Update app.py
Browse files
app.py
CHANGED
|
@@ -1,13 +1,17 @@
|
|
| 1 |
import gradio as gr
|
| 2 |
import torch
|
| 3 |
import numpy as np
|
| 4 |
-
from chronos import Chronos2Pipeline
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 5 |
|
| 6 |
-
# --- MODELİ YÜKLE ---
|
| 7 |
-
print("🚀 Chronos-2 Modeli Yükleniyor...")
|
| 8 |
try:
|
| 9 |
-
pipeline =
|
| 10 |
-
|
| 11 |
device_map="cpu",
|
| 12 |
torch_dtype=torch.float32,
|
| 13 |
)
|
|
@@ -18,29 +22,42 @@ except Exception as e:
|
|
| 18 |
|
| 19 |
def predict(context_str, prediction_length):
|
| 20 |
if pipeline is None:
|
| 21 |
-
return "Error: Model yüklenemedi."
|
| 22 |
|
| 23 |
try:
|
|
|
|
| 24 |
clean_s = context_str.strip()
|
| 25 |
if not clean_s: return "Error: Veri boş."
|
| 26 |
|
| 27 |
data_list = [float(x) for x in clean_s.split(',')]
|
| 28 |
|
| 29 |
-
# Tensor Oluştur (1, 1, Zaman)
|
| 30 |
context_tensor = torch.tensor(data_list).unsqueeze(0).unsqueeze(0)
|
| 31 |
|
| 32 |
-
# Tahmin Yap
|
| 33 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 34 |
|
| 35 |
-
#
|
| 36 |
-
|
| 37 |
|
| 38 |
-
|
|
|
|
|
|
|
| 39 |
|
| 40 |
except Exception as e:
|
| 41 |
return f"Error: {str(e)}"
|
| 42 |
|
| 43 |
-
#
|
| 44 |
-
#
|
| 45 |
iface = gr.Interface(fn=predict, inputs=["text", "number"], outputs="text")
|
| 46 |
-
iface.launch()
|
|
|
|
| 1 |
import gradio as gr
|
| 2 |
import torch
|
| 3 |
import numpy as np
|
| 4 |
+
from chronos import ChronosPipeline # Chronos2Pipeline yerine genel Pipeline kullanımı daha güvenlidir
|
# --- MODEL SETTINGS ---
# For the free CPU tier, 'tiny' or 'small' is recommended; 'base' can
# exhaust available RAM (per the original author's note).
MODEL_NAME = "amazon/chronos-t5-small"

print(f"🚀 {MODEL_NAME} Modeli Yükleniyor...")
|
| 11 |
|
|
|
|
|
|
|
| 12 |
try:
|
| 13 |
+
pipeline = ChronosPipeline.from_pretrained(
|
| 14 |
+
MODEL_NAME,
|
| 15 |
device_map="cpu",
|
| 16 |
torch_dtype=torch.float32,
|
| 17 |
)
|
|
|
|
| 22 |
|
| 23 |
def predict(context_str, prediction_length):
    """Forecast a comma-separated numeric series with the Chronos pipeline.

    Returns a single string "median|low|high" — the 0.5, 0.1 and 0.9
    quantiles of the forecast samples — which the calling bot splits on
    the '|' separator, or an "Error: ..." string on any failure.
    """
    # Model failed to load at startup (module-global `pipeline` is None).
    if pipeline is None:
        return "Error: Model yüklenemedi (RAM yetersiz olabilir)."

    try:
        series_text = context_str.strip()
        if not series_text:
            return "Error: Veri boş."

        values = [float(token) for token in series_text.split(',')]

        # Shape (1, 1, time): two leading singleton dims are prepended.
        # NOTE(review): ChronosPipeline.predict documents 1-D or
        # (batch, time) 2-D context; confirm a 3-D tensor is accepted.
        history = torch.tensor(values).unsqueeze(0).unsqueeze(0)

        horizon = int(prediction_length)
        forecast = pipeline.predict(history, horizon)

        # forecast[0] is presumably (num_samples, prediction_length).
        # NOTE(review): quantile() without a dim reduces over ALL
        # elements (samples AND timesteps together) — confirm the bot
        # wants one aggregate scalar rather than a per-step quantile.
        samples = forecast[0]
        low_bound, median_price, high_bound = (
            samples.quantile(q).item() for q in (0.1, 0.5, 0.9)
        )

        # Protocol expected by the bot: "median|low|high".
        return f"{median_price}|{low_bound}|{high_bound}"
    except Exception as e:
        return f"Error: {str(e)}"
|
| 59 |
|
# --- LAUNCH THE UI ---
# Binding server_name="0.0.0.0" on port 7860 is mandatory so the hosting
# container can route external traffic to the app.
iface = gr.Interface(
    fn=predict,
    inputs=["text", "number"],
    outputs="text",
)
iface.launch(server_name="0.0.0.0", server_port=7860)
|