MuhammadHafizFassya committed on
Commit
6fe3014
·
verified ·
1 Parent(s): e1b87e1

Upload 10 files

Browse files
app/__pycache__/main.cpython-314.pyc ADDED
Binary file (1.36 kB). View file
 
app/core/__pycache__/config.cpython-314.pyc ADDED
Binary file (1.74 kB). View file
 
app/core/config.py ADDED
@@ -0,0 +1,21 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
from pydantic_settings import BaseSettings, SettingsConfigDict
from typing import List
import os


class Settings(BaseSettings):
    """Application configuration.

    Values may be overridden via environment variables or a local ``.env``
    file (see ``model_config`` below).
    """

    PROJECT_NAME: str = "Amazon Chronos Forecasting API"
    HOST: str = "0.0.0.0"
    PORT: int = 8000

    # Model Configuration
    MODEL_ID: str = "amazon/chronos-t5-tiny"
    # Resolved against this file's location so it is independent of the CWD.
    # NOTE(review): climbs three levels ("../../../chronos-t5-tiny") — confirm
    # the checkout layout actually places the local model checkpoint there.
    MODEL_PATH: str = os.path.abspath(
        os.path.join(os.path.dirname(__file__), "../../../chronos-t5-tiny")
    )
    MODEL_SOURCE: str = "remote"  # 'local' or 'remote'

    # Open CORS so the API can be reached from anywhere (e.g. a Vercel frontend).
    CORS_ORIGINS: List[str] = ["*"]

    # pydantic v2 settings configuration; replaces the deprecated inner
    # ``class Config`` style (pydantic_settings is a v2-only package).
    model_config = SettingsConfigDict(env_file=".env")


settings = Settings()
app/main.py ADDED
@@ -0,0 +1,26 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from app.routes import forecast
from app.core.config import settings

# Application instance, titled from the shared settings object.
app = FastAPI(title=settings.PROJECT_NAME)

# CORS policy: origins come from settings; credentials, methods and headers
# are left wide open so any frontend (e.g. Vercel) can call the API.
_cors_policy = {
    "allow_origins": settings.CORS_ORIGINS,
    "allow_credentials": True,
    "allow_methods": ["*"],
    "allow_headers": ["*"],
}
app.add_middleware(CORSMiddleware, **_cors_policy)


@app.get("/health")
async def health_check():
    """Liveness probe: always reports healthy along with the service name."""
    return {"status": "healthy", "service": settings.PROJECT_NAME}


# Mount the forecasting endpoints under the /api prefix.
app.include_router(forecast.router, prefix="/api", tags=["Forecast"])

if __name__ == "__main__":
    # Local development entry point; production deployments run uvicorn directly.
    import uvicorn

    uvicorn.run("app.main:app", host=settings.HOST, port=settings.PORT, reload=True)
app/routes/__pycache__/forecast.cpython-314.pyc ADDED
Binary file (2.17 kB). View file
 
app/routes/forecast.py ADDED
@@ -0,0 +1,32 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
from fastapi import APIRouter, HTTPException
from app.schemas.forecast import ForecastRequest, ForecastResponse
from app.services.chronos_service import chronos_service
from app.core.config import settings

router = APIRouter()


@router.post("/forecast", response_model=ForecastResponse)
async def get_forecast(request: ForecastRequest):
    """Run a Chronos forecast over the submitted series.

    Returns the median forecast with 10th/90th-percentile bounds plus a short
    human-readable trend insight. Any service failure surfaces as HTTP 500.
    """
    try:
        # Perform prediction
        result = chronos_service.predict(request.series, request.prediction_length)

        # Calculate a simple insight from the first forecast step.
        last_val = request.series[-1]
        first_forecast = result["forecast"][0]
        if last_val != 0:
            change = ((first_forecast - last_val) / last_val) * 100
            trend = "naik" if change > 0 else "turun"
            insight = f"Tren diprediksi akan {trend} sebesar {abs(change):.1f}% pada langkah pertama."
        else:
            # BUG FIX: the percentage change is undefined when the last
            # observation is 0 (previously raised ZeroDivisionError → 500).
            # ForecastResponse.insight is Optional, so omit it in that case.
            insight = None

        return ForecastResponse(
            context_name=request.context_name,
            input_length=len(request.series),
            prediction_length=request.prediction_length,
            forecast=result["forecast"],
            lower_bound=result.get("lower_bound"),
            upper_bound=result.get("upper_bound"),
            model_name=settings.MODEL_ID,
            insight=insight
        )
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Forecasting failed: {str(e)}")
app/schemas/__pycache__/forecast.cpython-314.pyc ADDED
Binary file (2.67 kB). View file
 
app/schemas/forecast.py ADDED
@@ -0,0 +1,24 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
from pydantic import BaseModel, ConfigDict, Field, field_validator
from typing import List, Optional


class ForecastRequest(BaseModel):
    """Request payload for POST /forecast."""

    # pydantic v2: ``min_length`` constrains list size (``min_items`` is the
    # deprecated v1 spelling; the file already uses v2's field_validator).
    series: List[float] = Field(..., min_length=5, description="Historical time series data")
    prediction_length: int = Field(..., ge=1, le=30, description="Number of steps to forecast")
    context_name: Optional[str] = Field("Untitled Series", description="Name of the context for the series")

    @field_validator('series')
    @classmethod
    def validate_series(cls, v: List[float]) -> List[float]:
        # Redundant with min_length=5, but kept as an explicit safety net.
        if not v:
            raise ValueError("Series cannot be empty")
        return v


class ForecastResponse(BaseModel):
    """Forecast result returned to the client."""

    # ``model_name`` collides with pydantic v2's protected ``model_``
    # namespace and emits a UserWarning; clearing protected_namespaces keeps
    # the public field name intact without the warning.
    model_config = ConfigDict(protected_namespaces=())

    context_name: str
    input_length: int
    prediction_length: int
    forecast: List[float]
    lower_bound: Optional[List[float]] = None
    upper_bound: Optional[List[float]] = None
    model_name: str
    insight: Optional[str] = None
app/services/__pycache__/chronos_service.cpython-314.pyc ADDED
Binary file (5.29 kB). View file
 
app/services/chronos_service.py ADDED
@@ -0,0 +1,87 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import torch
from chronos import ChronosPipeline
from app.core.config import settings
import numpy as np
import os


class ChronosService:
    """Wrapper around a Chronos forecasting pipeline.

    Loads the model at construction time; if loading fails for any reason the
    service degrades gracefully to a deterministic-shape mock predictor so the
    API stays responsive.
    """

    def __init__(self):
        self.pipeline = None
        self.model_name = settings.MODEL_ID
        self.device = "cuda" if torch.cuda.is_available() else "cpu"
        self.load_model()

    def load_model(self):
        """Load the Chronos pipeline from a local path or the remote hub.

        On any failure ``self.pipeline`` stays ``None`` and the service
        operates in mock mode.
        """
        try:
            print(f"Loading model {self.model_name} from {settings.MODEL_SOURCE}...")
            # Ensure path is absolute for safety
            model_path = settings.MODEL_PATH
            if not os.path.exists(model_path) and settings.MODEL_SOURCE == "local":
                print(f"Warning: Local model path {model_path} not found. Falling back to remote.")
                source = settings.MODEL_ID
            else:
                source = model_path if settings.MODEL_SOURCE == "local" else settings.MODEL_ID

            self.pipeline = ChronosPipeline.from_pretrained(
                source,
                device_map=self.device,
                # bfloat16 only on GPU; CPU inference sticks to float32.
                torch_dtype=torch.bfloat16 if self.device == "cuda" else torch.float32,
            )
            print("Model loaded successfully.")
        except Exception as e:
            print(f"Error loading model: {e}")
            self.pipeline = None
            print("ChronosService will operate in MOCK MODE.")

    def predict(self, series: list, prediction_length: int):
        """Forecast ``prediction_length`` steps ahead of ``series``.

        Returns a dict with the median forecast and the 10th/90th-percentile
        bounds. Falls back to the mock predictor if the model is unavailable
        or inference raises.
        """
        if self.pipeline is None:
            return self._mock_predict(series, prediction_length)

        try:
            # Chronos expects a 1D or 2D tensor
            context = torch.tensor(series, dtype=torch.float32)

            # The model predicts multiple sample paths.
            forecast = self.pipeline.predict(
                context,
                prediction_length,
                num_samples=20,
            )

            # Forecast is usually [batch, samples, horizon]; since we pass a
            # single series, drop the batch axis when present.
            samples = forecast[0] if len(forecast.shape) == 3 else forecast

            # Convert to numpy ONCE instead of three times (the original
            # called samples.numpy() per quantile).
            sample_arr = samples.numpy()
            low = np.quantile(sample_arr, 0.1, axis=0)
            median = np.quantile(sample_arr, 0.5, axis=0)
            high = np.quantile(sample_arr, 0.9, axis=0)

            return {
                "forecast": median.tolist(),
                "lower_bound": low.tolist(),
                "upper_bound": high.tolist()
            }
        except Exception as e:
            print(f"Prediction error: {e}")
            return self._mock_predict(series, prediction_length)

    def _mock_predict(self, series: list, prediction_length: int):
        """Cheap stand-in forecast used when no model is loaded.

        Random walk with a slight upward drift from the last observation;
        bounds are fixed at ±10% of each forecast value.
        """
        last_val = series[-1]
        forecast = []
        current = last_val
        for _ in range(prediction_length):
            current = current * (1 + 0.05 * (np.random.random() - 0.2))  # Slight upward trend
            forecast.append(float(current))

        low = [v * 0.9 for v in forecast]
        high = [v * 1.1 for v in forecast]
        return {
            "forecast": forecast,
            "lower_bound": low,
            "upper_bound": high
        }


# Singleton instance shared by the route handlers.
chronos_service = ChronosService()