# Debate Bot — a Streamlit app that stages a two-bot debate using
# meta-llama/Llama-3.2-3B-Instruct via the transformers text-generation pipeline.
import streamlit as st
import os
from huggingface_hub import login
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline

# Access token from environment variables; only log in when one is actually
# set — login(token=None) would fail at startup, and public/cached models
# can still be used without authentication.
hf_token = os.getenv("HUGGINGFACE_API_KEY")
if hf_token:
    login(token=hf_token)
model_name = "meta-llama/Llama-3.2-3B-Instruct"

# Explicit RoPE-scaling override for the model config.  Llama 3.2 checkpoints
# ship "llama3"-style rope scaling; this override is a workaround for
# transformers versions whose config validation rejects the checkpoint's own
# rope_scaling dict.  NOTE(review): recent transformers spells the key
# "rope_type" but still accepts "type" for backward compatibility — confirm
# against the installed transformers version.
rope_scaling = {
    "type": "llama3",  # scaling algorithm
    "factor": 32.0,    # context-extension factor, adjustable as needed
}

# rope_scaling is a *model* config option; tokenizers do not use it, so it is
# not passed to AutoTokenizer (where it was previously a silently-ignored kwarg).
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name, rope_scaling=rope_scaling)

# Text-generation pipeline built from the locally loaded model and tokenizer.
generator = pipeline("text-generation", model=model, tokenizer=tokenizer)
# Now you can proceed with your code as normal
def generate_debate(topic):
    """Stage a two-bot debate on *topic* and render both turns in Streamlit.

    Bot A (proponent) opens the debate; Bot B (opponent) is prompted with
    Bot A's full answer and asked for a counterargument.  Both responses are
    written directly to the Streamlit page; nothing is returned.
    """
    # Bot A (proponent): open the debate.
    bot_a_prompt = f"Let's debate about the topic '{topic}'. What are your thoughts?"
    # max_new_tokens bounds the *generated* length — max_length would count
    # the prompt tokens too and can truncate the reply to almost nothing.
    # return_full_text=False keeps the prompt scaffold out of the answer, so
    # the display (and Bot B's prompt below) contains only the model's reply.
    bot_a_response = generator(
        bot_a_prompt,
        max_new_tokens=200,
        num_return_sequences=1,
        return_full_text=False,
    )[0]["generated_text"]

    # Bot B (opponent): counter Bot A's answer.
    bot_b_prompt = f"Bot B, respond to the following: {bot_a_response} What is your counterargument?"
    bot_b_response = generator(
        bot_b_prompt,
        max_new_tokens=200,
        num_return_sequences=1,
        return_full_text=False,
    )[0]["generated_text"]

    # Display the debate in paragraph format without introductory text.
    st.subheader("Bot A (Proponent) Response:")
    st.write(bot_a_response.strip())
    st.subheader("Bot B (Opponent) Response:")
    st.write(bot_b_response.strip())
# Streamlit interface for the user to enter a debate topic
# --- Streamlit UI: collect a debate topic and run the debate -----------------
st.title("Debate Bot")
topic_input = st.text_input(
    "Enter debate topic:",
    "Dogs Are Cute If They Are Small",
)
# An empty topic (user cleared the box) skips generation entirely.
if topic_input:
    generate_debate(topic_input)