Nicolás Larenas committed on
Commit
41b4bf3
·
verified ·
1 Parent(s): 5560522

Update ai_model.py

Browse files
Files changed (1) hide show
  1. ai_model.py +8 -13
ai_model.py CHANGED
@@ -3,6 +3,7 @@
3
  import google.generativeai as genai
4
  import os
5
  import logging
 
6
  from config import (
7
  SYSTEM_INSTRUCTION,
8
  MODEL_NAME,
@@ -12,7 +13,7 @@ from config import (
12
  DEFAULT_TOP_K,
13
  DEFAULT_STOP_SEQUENCES,
14
  )
15
- from typing import List, Optional, Dict
16
 
17
  # Configure logging
18
  logging.basicConfig(level=logging.ERROR)
@@ -33,14 +34,11 @@ model = genai.GenerativeModel(
33
  # Preprocess chat history to the required format
34
  def preprocess_chat_history(history: List[tuple]) -> List[Dict[str, str]]:
35
  messages = []
36
- for user_message, model_message in history:
37
- if isinstance(user_message, tuple):
38
- # If user_message is a tuple (e.g., contains images), skip for text-only model
39
- continue
40
- elif user_message is not None:
41
  messages.append({'role': 'user', 'content': user_message})
42
- if model_message is not None:
43
- messages.append({'role': 'assistant', 'content': model_message})
44
  return messages
45
 
46
  # Query AI model
@@ -65,7 +63,7 @@ async def query_ai_model(
65
  temperature=temperature,
66
  top_p=top_p,
67
  top_k=top_k,
68
- max_output_tokens=int(max_output_tokens),
69
  stop_sequences=stop_sequences,
70
  )
71
 
@@ -78,7 +76,4 @@ async def query_ai_model(
78
  # Extract the assistant's reply
79
  assistant_reply = {'role': 'assistant', 'content': response.text}
80
 
81
- return assistant_reply
82
- except Exception as e:
83
- logging.error("Error in query_ai_model", exc_info=True)
84
- return {'role': 'assistant', 'content': f"An error occurred: {str(e)}"}
 
3
  import google.generativeai as genai
4
  import os
5
  import logging
6
+ from typing import List, Dict, Optional
7
  from config import (
8
  SYSTEM_INSTRUCTION,
9
  MODEL_NAME,
 
13
  DEFAULT_TOP_K,
14
  DEFAULT_STOP_SEQUENCES,
15
  )
16
+ import asyncio
17
 
18
  # Configure logging
19
  logging.basicConfig(level=logging.ERROR)
 
34
  # Preprocess chat history to the required format
35
  def preprocess_chat_history(history: List[tuple]) -> List[Dict[str, str]]:
36
  messages = []
37
+ for user_message, assistant_message in history:
38
+ if user_message is not None:
 
 
 
39
  messages.append({'role': 'user', 'content': user_message})
40
+ if assistant_message is not None:
41
+ messages.append({'role': 'assistant', 'content': assistant_message})
42
  return messages
43
 
44
  # Query AI model
 
63
  temperature=temperature,
64
  top_p=top_p,
65
  top_k=top_k,
66
+ max_output_tokens=max_output_tokens,
67
  stop_sequences=stop_sequences,
68
  )
69
 
 
76
  # Extract the assistant's reply
77
  assistant_reply = {'role': 'assistant', 'content': response.text}
78
 
79
+ return assistant_reply