Spaces:
Sleeping
Sleeping
import openai  # type: ignore

# The OpenAI client reads the OPENAI_API_KEY environment variable by default;
# no explicit `openai.api_key = os.getenv(...)` assignment is needed.
client = openai.OpenAI()
def respond(
    message,
    history: list[tuple[str, str]],
    system_message,
    max_tokens,
    temperature,
    top_p,
):
    """Generate one chat reply with the OpenAI Responses API.

    Written as a generator (single ``yield``) so it can be used as a
    Gradio-style chat callback.

    Parameters
    ----------
    message : str
        The latest user message.
    history : list[tuple[str, str]]
        Prior ``(user, assistant)`` turns; empty strings in a pair are skipped.
    system_message : str
        System prompt placed first in the conversation.
    max_tokens : int
        Generation cap, forwarded as ``max_output_tokens``.
    temperature : float
        Sampling temperature forwarded to the API.
    top_p : float
        Nucleus-sampling parameter forwarded to the API.

    Yields
    ------
    str
        The complete reply text (no streaming; one yield per call).
    """
    # Assemble the conversation: system prompt first, then the stored
    # history as alternating user/assistant turns, then the new message.
    messages = [{"role": "system", "content": system_message}]
    for user_turn, assistant_turn in history:
        if user_turn:
            messages.append({"role": "user", "content": user_turn})
        if assistant_turn:
            messages.append({"role": "assistant", "content": assistant_turn})
    messages.append({"role": "user", "content": message})

    # Single non-streaming request; the full reply arrives at once.
    # `client` is the module-level OpenAI() instance created at import time.
    response = client.responses.create(
        model="gpt-4.1-nano",
        input=messages,
        temperature=temperature,
        top_p=top_p,
        max_output_tokens=max_tokens,
    )
    yield response.output_text  # chat reply