File size: 9,931 Bytes
6b94f58
8cc04fc
46a0645
6b94f58
8cc04fc
6b94f58
1f6dd11
e0155ae
eba31e8
af41ddd
eba31e8
 
 
af41ddd
b3dd9d4
 
6b94f58
8cc04fc
 
 
 
 
41da909
aea15c7
8cc04fc
 
6b94f58
af41ddd
6b94f58
8cc04fc
 
 
13a3392
af41ddd
13a3392
daec43f
13a3392
 
 
46a0645
 
 
 
 
 
af41ddd
46a0645
 
 
 
 
af41ddd
 
 
 
 
 
46a0645
af41ddd
13a3392
c47f1a1
ab872b3
6b94f58
ceb3db9
af41ddd
 
13a3392
af41ddd
 
c47f1a1
ab872b3
8cc04fc
5cd0f1d
af41ddd
 
8cc04fc
af41ddd
 
222b6c6
af41ddd
15f5510
e6f35c1
5cd0f1d
af41ddd
15f5510
5cd0f1d
af41ddd
15f5510
af41ddd
15f5510
af41ddd
 
 
 
 
 
 
 
 
 
 
 
 
46a0645
13a3392
6b94f58
13a3392
 
58ad01d
8cc04fc
c47f1a1
 
13a3392
 
 
 
3422997
8cc04fc
3422997
 
8cc04fc
3422997
 
 
c74cb99
af41ddd
10568a5
13a3392
8cc04fc
af41ddd
8cc04fc
c74cb99
8cc04fc
c74cb99
8cc04fc
 
af41ddd
c74cb99
 
 
c0b03d7
c74cb99
 
 
54d8776
c74cb99
 
 
af41ddd
8cc04fc
 
 
c0b03d7
8cc04fc
c74cb99
8cc04fc
c74cb99
8cc04fc
 
 
 
 
af41ddd
ab872b3
 
46a0645
ab872b3
 
 
ad51131
46a0645
 
 
ab872b3
 
 
 
 
 
aea15c7
 
46a0645
 
ab872b3
 
46a0645
 
 
aea15c7
 
 
 
 
 
ad51131
aea15c7
 
 
 
ab872b3
 
46a0645
 
 
 
 
 
 
ad51131
aea15c7
ad51131
aea15c7
ad51131
 
 
 
 
46a0645
ab872b3
46a0645
ab872b3
 
 
ad51131
3bb4b91
 
5856d0c
af41ddd
223743e
 
 
 
 
 
 
 
 
 
e6f35c1
5856d0c
223743e
 
4b237f5
 
223743e
4b237f5
 
e6f35c1
4b237f5
 
 
 
 
 
 
 
46a0645
5856d0c
43c4965
5856d0c
 
 
 
46a0645
5856d0c
576fbad
5856d0c
6b94f58
ab872b3
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
import pandas as pd
import yfinance as yf
from datasets import Dataset, load_dataset
from transformers import pipeline
import plotly.graph_objects as go
import gradio as gr
from huggingface_hub import login
import os

# --- Hugging Face login ---
# Authenticate so the private dataset below can be read and pushed.
# Guard against a missing HF_TOKEN: calling login(token=None) falls back to
# an interactive prompt, which hangs in a headless Space / CI run.
token = os.getenv("HF_TOKEN")
if token:
    login(token=token)
else:
    print("Warning: HF_TOKEN is not set; skipping Hugging Face login.")

# --- Constants ---
# Hub repository IDs for the datasets and models used below.
HF_DATASET = "SelmaNajih001/Cnbc_MultiCompany"  # public CNBC headlines, multi-company
HF_PRIVATE_DATASET = "SelmaNajih001/portfolio_strategy_data2"  # private cache of already-scored rows
MODEL_SENTIMENT = "SelmaNajih001/SentimentBasedOnPriceVariation"  # custom classifier; emits UP/DOWN labels (see strategy A)
MODEL_PRICE_TESLA = "SelmaNajih001/PricePredictionForTesla"  # price model; its score is used as a regression value downstream
MODEL_PRICE_MICROSOFT = "SelmaNajih001/PricePredictionForMicrosoft"  # same, for Microsoft
MODEL_FINBERT = "ProsusAI/finbert"  # financial-sentiment baseline (POSITIVE/NEGATIVE/NEUTRAL)

# Company display name -> Yahoo Finance ticker symbol.
TICKERS = {
    "Tesla": "TSLA", # Tesla, Inc.
    "Microsoft": "MSFT"
}
companies = list(TICKERS.keys())  # ["Tesla", "Microsoft"]; re-ordered later for the UI

# --- Pipelines ---
# Models are loaded once at import time; the first run downloads the
# weights from the Hub, so startup can be slow.
sentiment_pipeline = pipeline("sentiment-analysis", model=MODEL_SENTIMENT)
price_pipeline_tesla = pipeline("text-classification", model=MODEL_PRICE_TESLA)
price_pipeline_msft = pipeline("text-classification", model=MODEL_PRICE_MICROSOFT)
finbert_pipeline = pipeline("sentiment-analysis", model=MODEL_FINBERT)

# --- Load datasets ---
# News dataset: one row per headline, with 'Date', 'Title', 'Company' columns.
df_multi = pd.DataFrame(load_dataset(HF_DATASET)["train"])
df_multi['date'] = pd.to_datetime(df_multi['Date'], errors='coerce')
# Normalized (midnight) timestamp used as the merge key against daily prices.
df_multi['date_merge'] = df_multi['date'].dt.normalize()
df_multi.sort_values('date', inplace=True)

# Previously scored rows (incremental cache). On the first run the private
# dataset does not exist yet, so fall back to an empty frame.
try:
    ds_existing = load_dataset(HF_PRIVATE_DATASET)["train"]
    df_existing = pd.DataFrame(ds_existing)
except Exception:  # narrowed from bare `except:` so KeyboardInterrupt/SystemExit still propagate
    df_existing = pd.DataFrame()

# --- Select only the rows not yet scored ---
# Explicit .copy() so the column assignments below mutate an independent
# frame instead of a view of df_multi (avoids SettingWithCopyWarning and
# potentially-lost writes).
if not df_existing.empty:
    df_to_add = df_multi[~df_multi['Date'].isin(df_existing['Date'])].copy()
else:
    df_to_add = df_multi.copy()

# --- Initialize result columns (filled row-by-row in the loop below) ---
df_to_add['Sentiment'] = ""            # label from the custom sentiment model
df_to_add['Confidence'] = 0.0          # its confidence score
df_to_add['Predicted'] = 0.0           # per-company price-model output
df_to_add['FinBERT_Sentiment'] = ""    # FinBERT label
df_to_add['FinBERT_Confidence'] = 0.0  # FinBERT confidence score
# --- Score each new headline with the three models ---
for i, row in df_to_add.iterrows():
    company = row['Company']
    title = row['Title']  # hoisted: used by all three model calls

    # Custom sentiment model (labels normalized to e.g. UP/DOWN for strategy A).
    try:
        res = sentiment_pipeline(title)[0]
        df_to_add.at[i, 'Sentiment'] = res['label'].upper().strip()
        df_to_add.at[i, 'Confidence'] = res['score']
    except Exception:  # a single bad row must not abort the whole run
        df_to_add.at[i, 'Sentiment'] = 'ERROR'
        df_to_add.at[i, 'Confidence'] = 0.0

    # FinBERT sentiment (POSITIVE/NEGATIVE/NEUTRAL, used by strategy C).
    try:
        res_f = finbert_pipeline(title)[0]
        df_to_add.at[i, 'FinBERT_Sentiment'] = res_f['label'].upper().strip()
        df_to_add.at[i, 'FinBERT_Confidence'] = res_f['score']
    except Exception:
        df_to_add.at[i, 'FinBERT_Sentiment'] = 'ERROR'
        df_to_add.at[i, 'FinBERT_Confidence'] = 0.0

    # Per-company price model (score treated as a regression value by strategy B).
    # NOTE(review): max(val, 1.0) floors every successful prediction at 1.0,
    # which makes strategy B's `predicted > 1` branch fire for almost every row
    # and its `predicted < -1` branch unreachable — confirm the clamp is intentional.
    try:
        if company == "Tesla":
            val = price_pipeline_tesla(title)[0]['score']
            df_to_add.at[i, 'Predicted'] = max(val, 1.0)
        elif company == "Microsoft":
            val = price_pipeline_msft(title)[0]['score']
            df_to_add.at[i, 'Predicted'] = max(val, 1.0)
    except Exception:
        df_to_add.at[i, 'Predicted'] = 0.0

# --- Merge the freshly scored rows into the cached dataset ---
if df_existing.empty:
    df_updated = df_to_add.copy()
else:
    df_updated = pd.concat([df_existing, df_to_add], ignore_index=True)

# --- Push the updated cache back to the Hub ---
Dataset.from_pandas(df_updated).push_to_hub(HF_PRIVATE_DATASET, private=True)
print(f"Dataset aggiornato su Hugging Face: {HF_PRIVATE_DATASET}")

# From here on, work with the full (cached + new) scored dataset.
df_multi = df_updated.copy()

# --- Download daily closing prices per ticker ---
prices = {}
end_date = pd.Timestamp.today()  # loop-invariant: one end date for every ticker
for company, ticker in TICKERS.items():
    # Start from the earliest headline we have for this company.
    start_date = df_multi[df_multi['Company'] == company]['date'].min()
    df_prices = yf.download(ticker, start=start_date, end=end_date)[['Close']].reset_index()
    df_prices.columns = ['Date_', f'Close_{ticker}']
    # Normalized date used as the merge key against the news frame.
    df_prices['date_merge'] = pd.to_datetime(df_prices['Date_']).dt.normalize()
    # Next day's percentage move: the return realized by trading on today's news.
    df_prices['PctChangeDaily'] = df_prices[f'Close_{ticker}'].pct_change().shift(-1)
    prices[company] = df_prices

# --- Build per-company frames with cumulative strategy P&L ---
# The three strategies share the same accounting (take the previous day's
# close as the position size, apply the next day's pct change, accumulate);
# only the buy/sell signal differs, so the loop is factored out once.

def _cumulative_pnl(df_c, price_col, signal):
    """Return the cumulative P&L list for one trading strategy.

    df_c      : per-company frame with a 0..n-1 RangeIndex (guaranteed by the
                inner merge below), containing 'PctChangeDaily' and price_col.
    price_col : name of the close-price column; the previous row's close is
                used as the notional position size.
    signal    : callable(row) -> +1 (buy), -1 (sell) or 0 (no trade).
    """
    cumulative = [0.0] * len(df_c)
    for i in range(1, len(df_c)):
        pct = df_c.loc[i, 'PctChangeDaily'] if pd.notnull(df_c.loc[i, 'PctChangeDaily']) else 0
        price = df_c.loc[i - 1, price_col]
        cumulative[i] = cumulative[i - 1] + signal(df_c.loc[i]) * price * pct
    return cumulative


def _signal_custom(row):
    # Strategy A: custom sentiment model, trade only on confident UP/DOWN calls.
    if row['Sentiment'] == "UP" and row['Confidence'] > 0.8:
        return 1
    if row['Sentiment'] == "DOWN" and row['Confidence'] > 0.8:
        return -1
    return 0


def _signal_regression(row):
    # Strategy B: regression output, buy above +1, sell below -1.
    if row['Predicted'] > 1:
        return 1
    if row['Predicted'] < -1:
        return -1
    return 0


def _signal_finbert(row):
    # Strategy C: FinBERT sentiment, trade only on confident POSITIVE/NEGATIVE calls.
    if row['FinBERT_Sentiment'] == "POSITIVE" and row['FinBERT_Confidence'] > 0.8:
        return 1
    if row['FinBERT_Sentiment'] == "NEGATIVE" and row['FinBERT_Confidence'] > 0.8:
        return -1
    return 0


dfs_final = {}
for company in companies:
    df_c = df_multi[df_multi['Company'] == company].copy()

    if company in prices:
        # Inner merge also resets the index to 0..n-1, which _cumulative_pnl relies on.
        df_c = pd.merge(df_c, prices[company], on='date_merge', how='inner')

    # Aggregation keys for the dashboard's Day/Month/Year views.
    df_c['Day'] = df_c['date'].dt.date
    df_c['Month'] = df_c['date'].dt.to_period('M').dt.to_timestamp()
    df_c['Year'] = df_c['date'].dt.year

    price_col = f'Close_{TICKERS[company]}'
    # dtype=float keeps the column float64 even for an empty frame,
    # matching the original 0.0-initialized columns.
    df_c['StrategyA_Cumulative'] = pd.Series(_cumulative_pnl(df_c, price_col, _signal_custom), index=df_c.index, dtype=float)
    df_c['StrategyB_Cumulative'] = pd.Series(_cumulative_pnl(df_c, price_col, _signal_regression), index=df_c.index, dtype=float)
    df_c['StrategyC_Cumulative'] = pd.Series(_cumulative_pnl(df_c, price_col, _signal_finbert), index=df_c.index, dtype=float)

    dfs_final[company] = df_c.drop(columns=["date", "date_merge"], errors="ignore")

# --- Gradio callback ---
def show_company_data(selected_companies, aggregation="Day"):
    """Build the strategy-comparison and price figures for the selected companies.

    selected_companies : list of company names (keys of `dfs_final`).
    aggregation        : "Day", "Month" or "Year" — column to group by.

    Returns (strategies_figure, prices_figure). The click handler is wired to
    exactly two gr.Plot outputs, so the empty-selection branch must also
    return two values (the original returned three, breaking the mapping).
    """
    if not selected_companies:
        # None leaves each gr.Plot empty — still exactly two outputs.
        return None, None

    agg_col = {"Day": "Day", "Month": "Month", "Year": "Year"}.get(aggregation, "Day")

    fig_strat = go.Figure()
    fig_price = go.Figure()
    dfs_display = []

    # Loop-invariant: trace-name labels for the three strategy columns.
    strategy_labels = {
        'StrategyA_Cumulative': "Custom Sentiment",
        'StrategyB_Cumulative': "Regression",
        'StrategyC_Cumulative': "FinBERT"
    }

    for c in selected_companies:
        if c not in dfs_final:
            continue  # silently skip unknown names rather than erroring
        df_c = dfs_final[c]

        # One point per period: last cumulative value / last close in the period.
        df_grouped = df_c.groupby(agg_col).agg({
            'StrategyA_Cumulative': 'last',
            'StrategyB_Cumulative': 'last',
            'StrategyC_Cumulative': 'last',
            f'Close_{TICKERS[c]}': 'last'
        }).reset_index()

        df_grouped['Company'] = c
        dfs_display.append(df_grouped)  # kept for a future data-table output

        for strat in ['StrategyA_Cumulative', 'StrategyB_Cumulative', 'StrategyC_Cumulative']:
            fig_strat.add_trace(go.Scatter(
                x=df_grouped[agg_col],
                y=df_grouped[strat],
                mode="lines",
                name=f"{c} - {strategy_labels[strat]}"
            ))

        fig_price.add_trace(go.Scatter(
            x=df_grouped[agg_col],
            y=df_grouped[f'Close_{TICKERS[c]}'],
            mode="lines",
            name=f"{c} Price"
        ))

    fig_strat.update_layout(
        title="Strategies Comparison (Custom Sentiment, Regression, FinBERT)",
        xaxis_title=aggregation,
        yaxis_title="Cumulative Value",
        template="plotly_dark",
        hovermode="x unified"
    )

    fig_price.update_layout(
        title="Stock Prices",
        xaxis_title=aggregation,
        yaxis_title="Price",
        template="plotly_dark",
        hovermode="x unified"
    )

    return fig_strat, fig_price

# --- Gradio Interface ---
description_text = """
### Portfolio Strategy Comparison Dashboard
This dashboard allows you to compare the performance of three sentiment models in driving trading strategies for Microsoft and Tesla.
- **Strategy logic**: Each model's score (or regression value) is used as a buy/sell signal. 
  - If the score exceeds 0.8 β†’ buy
  - If the score is below -0.8 β†’ sell
  - Otherwise β†’ no trade
  - For the regression model, thresholds are +1 and -1.
"""

companies = ["Microsoft", "Tesla"]

# --- Build and launch the Gradio app ---
with gr.Blocks() as demo:
    gr.Markdown("# Portfolio Strategy Dashboard")
    gr.Markdown(description_text)

    with gr.Row():
        # Multi-select: one set of traces per selected company on each figure.
        dropdown_companies = gr.Dropdown(
            choices=companies,
            value=["Microsoft", "Tesla"],
            multiselect=True,
            label="Select Companies"
        )
        # Grouping granularity passed straight to show_company_data.
        radio_aggregation = gr.Radio(
            choices=["Day", "Month", "Year"],
            value="Day",
            label="Aggregation Level"
        )
        submit_btn = gr.Button("Submit")

    #data_table = gr.Dataframe(label="Data Preview", type="pandas")
    strategies_plot = gr.Plot(label="Strategies")
    prices_plot = gr.Plot(label="Prices")

    # NOTE(review): two outputs are wired here, but show_company_data's
    # empty-selection branch returns three values — confirm the arity matches.
    submit_btn.click(
        fn=show_company_data,
        inputs=[dropdown_companies, radio_aggregation],
        outputs=[strategies_plot, prices_plot] # data_table could be added back later
    )

demo.launch()