Hugging Face Spaces page capture — the Space currently reports a build error.
import gradio as gr
from transformers import AutoTokenizer, AutoModelForCausalLM

# Load the Marco-o1 model and its tokenizer once at module import, so
# every request served by the app reuses the same weights in memory.
MODEL_NAME = "AIDC-AI/Marco-o1"
tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
model = AutoModelForCausalLM.from_pretrained(MODEL_NAME)
def ai_conversation(prompt, history=None):
    """Run one round of a two-AI exchange seeded by the user's prompt.

    AI 1 answers the user's message, then AI 2 answers AI 1, and both
    turns are appended to the running transcript.

    Args:
        prompt: The user's message that starts this round.
        history: Prior ``(speaker, text)`` pairs from earlier rounds.
            A fresh list is created when omitted.

    Returns:
        ``(history, history)`` — the same transcript list twice, matching
        the Gradio ``["chatbot", "state"]`` output pair.
    """
    # Default to None, not [] — a mutable [] default is shared across
    # calls and would silently accumulate turns between unrelated chats
    # (it also made the None guard below unreachable from the default).
    if history is None:
        history = []

    def _generate(text):
        # Encode `text`, generate a continuation, and decode only the
        # newly generated tokens (everything past the prompt length).
        input_ids = tokenizer.encode(text + tokenizer.eos_token, return_tensors="pt")
        response_ids = model.generate(
            input_ids,
            # max_new_tokens caps the reply itself; the original
            # max_length=200 counted prompt tokens too, so long prompts
            # produced empty replies.
            max_new_tokens=200,
            pad_token_id=tokenizer.eos_token_id,
        )
        return tokenizer.decode(
            response_ids[:, input_ids.shape[-1]:][0], skip_special_tokens=True
        )

    # AI 1 replies to the user.
    response1 = _generate(prompt)
    history.append(("You", prompt))
    history.append(("AI 1", response1))

    # AI 2 replies to AI 1. The original code re-appended AI 1's
    # response here, duplicating it in every transcript — removed.
    response2 = _generate(response1)
    history.append(("AI 2", response2))

    return history, history
# Gradio UI: one text box plus the per-session state feeds
# ai_conversation; its transcript is rendered in a chatbot widget and
# echoed back into state for the next turn.
_INTERFACE_CONFIG = {
    "fn": ai_conversation,
    "inputs": ["text", "state"],
    "outputs": ["chatbot", "state"],
    "title": "Marco-o1 Group Chat Simulation",
    "description": "Type a message to start a group chat between two AI instances.",
}
interface = gr.Interface(**_INTERFACE_CONFIG)

# Start the web server only when run as a script, not on import.
if __name__ == "__main__":
    interface.launch()