datacipen committed on
Commit
0a479c3
·
verified ·
1 Parent(s): 284b0e3

Update main.py

Browse files
Files changed (1) hide show
  1. main.py +7 -8
main.py CHANGED
@@ -878,7 +878,7 @@ app.layout = dmc.MantineProvider(
878
  dcc.Location(id='redirect', refresh=True),
879
  dcc.Store(id='login-status', storage_type='session'),
880
  dcc.Store(id="history-store", storage_type="session", data=[]),
881
- dcc.Store(id="model-params-store", storage_type="session", data={"model": "mistralai/Mistral-Small-3.2-24B-Instruct-2506","temperature": 0.7,"max_tokens": 1024,"top_p": 0.9}),
882
  html.Div(id='user-status-div'),
883
  html.Div(id='page-content')
884
  ])])
@@ -890,7 +890,7 @@ def layout(**kwargs):
890
  dcc.Location(id='redirect', refresh=True),
891
  dcc.Store(id='login-status', storage_type='session'),
892
  dcc.Store(id="history-store", storage_type="session", data=[]),
893
- dcc.Store(id="model-params-store", storage_type="session", data={"model": "mistralai/Mistral-Small-3.2-24B-Instruct-2506","temperature": 0.7,"max_tokens": 1024,"top_p": 0.9}),
894
  html.Div(id='user-status-div'),
895
  html.Div(id='page-content')
896
  ]),
@@ -910,13 +910,12 @@ app_page = dmc.MantineProvider(
910
  dcc.Dropdown(
911
  id="model-dropdown",
912
  options=[
913
- {"label": "Mistral Small 3.1 24B", "value": "mistralai/Mistral-Small-3.1-24B-Instruct-2503"},
914
- {"label": "Magistral 24b small", "value": "magistral:24b-small-2506-q4_K_M"},
915
  {"label": "GPT oss", "value": "openai/gpt-oss-120b"},
916
  {"label": "Llama 3.3 70B", "value": "RedHatAI/Llama-3.3-70B-Instruct-FP8-dynamic"},
917
  {"label": "Mistral Small 3.2 24B", "value": "mistralai/Mistral-Small-3.2-24B-Instruct-2506"},
918
  ],
919
- value="mistralai/Mistral-Small-3.2-24B-Instruct-2506", style={"font-size": "0.75rem","color": "rgb(80,106,139)"},
920
  ),
921
  ], style={"margin-bottom": "20px"}),
922
 
@@ -1305,9 +1304,9 @@ app_avid_page = dmc.MantineProvider(
1305
  {"label": "deepseek R1 8b llama", "value": "deepseek-r1:8b-llama-distill-q4_K_M"},
1306
  {"label": "phi4 14b", "value": "phi4:14b-q8_0"},
1307
  {"label": "gemma3 27b", "value": "gemma3:27b"},
1308
- {"label": "Mistral Small 3.2 24B", "value": "mistralai/Mistral-Small-3.2-24B-Instruct-2506"},
1309
  ],
1310
- value="mistralai/Mistral-Small-3.2-24B-Instruct-2506", style={"font-size": "0.75rem","color": "rgb(80,106,139)"},
1311
  ),
1312
  ], style={"margin-bottom": "20px"}),
1313
 
@@ -2024,7 +2023,7 @@ def generate_competence(requete, categorie_selected, n_clicks, categorie, enseig
2024
  """
2025
  # Simulation de l'appel à l'API LLM (à remplacer par un vrai appel API)
2026
  try:
2027
- if model_params["model"] == "mistralai/Mistral-Small-3.2-24B-Instruct-2506":
2028
  baseURL = os.environ['BASEURL_ALBERT_API_KEY']
2029
  os.environ['ENDPOINT_API_KEY'] = os.environ['ENDPOINT_ALBERT_API_KEY']
2030
  else:
 
878
  dcc.Location(id='redirect', refresh=True),
879
  dcc.Store(id='login-status', storage_type='session'),
880
  dcc.Store(id="history-store", storage_type="session", data=[]),
881
+ dcc.Store(id="model-params-store", storage_type="session", data={"model": "openweight-large","temperature": 0.7,"max_tokens": 1024,"top_p": 0.9}),
882
  html.Div(id='user-status-div'),
883
  html.Div(id='page-content')
884
  ])])
 
890
  dcc.Location(id='redirect', refresh=True),
891
  dcc.Store(id='login-status', storage_type='session'),
892
  dcc.Store(id="history-store", storage_type="session", data=[]),
893
+ dcc.Store(id="model-params-store", storage_type="session", data={"model": "openweight-large","temperature": 0.7,"max_tokens": 1024,"top_p": 0.9}),
894
  html.Div(id='user-status-div'),
895
  html.Div(id='page-content')
896
  ]),
 
910
  dcc.Dropdown(
911
  id="model-dropdown",
912
  options=[
913
+ {"label": "openweight-large", "value": "openweight-large"},
 
914
  {"label": "GPT oss", "value": "openai/gpt-oss-120b"},
915
  {"label": "Llama 3.3 70B", "value": "RedHatAI/Llama-3.3-70B-Instruct-FP8-dynamic"},
916
  {"label": "Mistral Small 3.2 24B", "value": "mistralai/Mistral-Small-3.2-24B-Instruct-2506"},
917
  ],
918
+ value="openweight-large", style={"font-size": "0.75rem","color": "rgb(80,106,139)"},
919
  ),
920
  ], style={"margin-bottom": "20px"}),
921
 
 
1304
  {"label": "deepseek R1 8b llama", "value": "deepseek-r1:8b-llama-distill-q4_K_M"},
1305
  {"label": "phi4 14b", "value": "phi4:14b-q8_0"},
1306
  {"label": "gemma3 27b", "value": "gemma3:27b"},
1307
+ {"label": "openweight-large", "value": "openweight-large"},
1308
  ],
1309
+ value="openweight-large", style={"font-size": "0.75rem","color": "rgb(80,106,139)"},
1310
  ),
1311
  ], style={"margin-bottom": "20px"}),
1312
 
 
2023
  """
2024
  # Simulation de l'appel à l'API LLM (à remplacer par un vrai appel API)
2025
  try:
2026
+ if model_params["model"] == "openweight-large":
2027
  baseURL = os.environ['BASEURL_ALBERT_API_KEY']
2028
  os.environ['ENDPOINT_API_KEY'] = os.environ['ENDPOINT_ALBERT_API_KEY']
2029
  else: