|
|
|
|
|
|
|
|
import os |
|
|
import json |
|
|
import mesop as me |
|
|
import mesop.labs as mel |
|
|
from dotenv import load_dotenv |
|
|
import google.generativeai as genai |
|
|
from google.generativeai.types.generation_types import GenerateContentResponse |
|
|
from typing import Generator |
|
|
|
|
|
|
|
|
# Path of the JSON config file, used when CHAT_CONFIG_PATH is not set.
DEFAULT_CONFIG_PATH = "./config.json"


# Gemini model to use when the MODEL_NAME environment variable is not set.
DEFAULT_MODEL_NAME = "learnlm-1.5-pro-experimental"
|
|
|
|
|
|
|
|
# Load environment variables (e.g. GOOGLE_API_KEY, MODEL_NAME, CHAT_CONFIG_PATH)
# from a local .env file, if present.
load_dotenv()


# Maps mesop chat roles to the role names the Gemini API expects
# ("assistant" -> "model"); used when rebuilding history in transform().
rolemap = {"user": "user", "assistant": "model"}


# Configure the Gemini client with the API key from the environment.
# NOTE(review): if GOOGLE_API_KEY is unset this passes None — confirm
# genai.configure surfaces a clear error in that case.
genai.configure(api_key=os.getenv("GOOGLE_API_KEY"))
|
|
|
|
|
|
|
|
# Default generation parameters for the Gemini model. These may be
# overridden by the "generation_config" section of the JSON config file.
generation_config = dict(
    temperature=1,
    top_p=0.95,
    top_k=64,
    max_output_tokens=8192,
    response_mime_type="text/plain",
)
|
|
|
|
|
|
|
|
|
|
|
# Parsed contents of the JSON config file, or None when no file has been
# loaded successfully yet.
_config: dict | None = None


def _load_config() -> None:
    """Load the JSON config file and merge its generation settings.

    The file path comes from the CHAT_CONFIG_PATH environment variable,
    falling back to DEFAULT_CONFIG_PATH. On success the parsed dict is
    stored in the module-level ``_config`` and its optional
    "generation_config" section is merged into ``generation_config``.
    A missing or malformed file is reported with a message and leaves
    the default settings untouched.
    """
    global _config

    config_path = os.environ.get("CHAT_CONFIG_PATH", DEFAULT_CONFIG_PATH)
    try:
        # Explicit encoding avoids platform-dependent default decoding.
        with open(config_path, "r", encoding="utf-8") as f:
            _config = json.load(f)
    except FileNotFoundError:
        print(f"Warning: Could not read config file at: {config_path}")
    except json.JSONDecodeError as e:
        print(f"Error parsing config file: {e}")
    else:
        if _config:
            # Merge overrides on top of the defaults. The original passed
            # generation_config itself as the .get() fallback, which did a
            # pointless self-update when the key was absent; an empty dict
            # is the correct no-op fallback. (No `global generation_config`
            # needed: the dict is mutated in place, never rebound.)
            generation_config.update(_config.get("generation_config", {}))
|
|
|
|
|
# Load the user configuration before constructing the model so the system
# prompt and generation parameters reflect the config file.
_load_config()


# System instruction: the Spanish ("es") prompt from the config file.
# Falls back to None (no system instruction) when the config failed to load
# or the keys are absent — the original `_config['prompt']['es']` raised
# TypeError (on _config is None) or KeyError in those cases, crashing at
# import time.
_system_instruction = (_config or {}).get("prompt", {}).get("es")

model = genai.GenerativeModel(
    model_name=os.environ.get("MODEL_NAME", DEFAULT_MODEL_NAME),
    generation_config=generation_config,
    system_instruction=_system_instruction,
)
|
|
|
|
|
|
|
|
def on_load(e: me.LoadEvent):
    # Page-load event hook; currently only logs that the event fired.
    print("***On load event***")
|
|
|
|
|
|
|
|
|
|
|
@me.stateclass
class FirstState:
    # Per-session state: the optional "start_prompt" query parameter captured
    # on first page render; prepended as the first user turn in transform().
    first:str|None = None
|
|
|
|
|
|
|
|
@me.page(
    security_policy=me.SecurityPolicy(
        allowed_iframe_parents=["https://google.github.io", "https://huggingface.co"]
    ),
    path="/",
    title="Mesop Demo Chat",
)
def page():
    """Render the chat page.

    Shows the configured welcome message (or a default when the config is
    missing), captures an optional ``start_prompt`` query parameter into
    per-session state, and renders the chat widget.
    """
    if _config:
        try:
            welcome_message = _config["welcome_message"]
        except KeyError:
            print("Error: 'welcome_message' not found in config file.")
        else:
            # Bug fix: the original fetched welcome_message but never
            # rendered it, leaving the variable unused.
            me.text(welcome_message)
    else:
        print("Config not loaded, using default values.")
        me.text("Welcome to the Chat (Default)")

    state = me.state(FirstState)
    if 'start_prompt' in me.query_params:
        # Capture the starting prompt once, then remove it from the URL so
        # a refresh does not re-apply it.
        start_prompt: str = me.query_params['start_prompt']
        del me.query_params['start_prompt']
        state.first = start_prompt

    mel.chat(transform, title="DSA Tutor", bot_user="Tutor")
|
|
|
|
|
|
|
|
def transform(input: str, history: list[mel.ChatMessage]) -> Generator[str, None, None]:
    """Stream the model's reply to ``input`` given the chat ``history``.

    Args:
        input: The user's latest message.
        history: Prior chat messages provided by the mesop chat widget.

    Yields:
        Text chunks of the model response as they stream in.
    """
    messages = []
    state = me.state(FirstState)
    # Prepend the start_prompt captured on page load (if any) so the model
    # sees it as the first user turn of the conversation.
    if state.first:
        messages.append({"role": "user", "parts": [state.first]})

    # Map mesop roles ("user"/"assistant") to Gemini roles ("user"/"model").
    messages.extend(
        {"role": rolemap[message.role], "parts": [message.content]}
        for message in history
    )

    chat_session = model.start_chat(history=messages)
    response: GenerateContentResponse = chat_session.send_message(input, stream=True)
    # The original also accumulated chunks into a `text` variable that was
    # never used; that dead accumulation is removed.
    for chunk in response:
        yield chunk.text
|
|
|
|
|
|
|
|
|
|
|
|